From 49d36d3732410e546fcb121cb476b67a332719e2 Mon Sep 17 00:00:00 2001 From: Giancarlo Panichi Date: Wed, 31 Aug 2022 15:05:14 +0200 Subject: [PATCH] First Commit Signed-off-by: Giancarlo Panichi --- .Dockerfile.swp | 0 .project | 17 + .pydevproject | 8 + .requirements.txt.swp | 0 .settings/org.eclipse.core.resources.prefs | 4 + Dockerfile | 37 ++ FUNDING.md | 26 ++ LICENSE.md | 311 +++++++++++++ MANIFEST.in | 3 + README.md | 96 ++++ canegatto.jpg | Bin 0 -> 44274 bytes dist/simpleimageclassifier-1.0.0.tar.gz | Bin 0 -> 43721 bytes requirements.txt | 9 + runexample.txt | 6 + setup.py | 42 ++ src/canegatto_out.jpg | Bin 0 -> 37831 bytes src/simpleimageclassifier.egg-info/PKG-INFO | 414 ++++++++++++++++++ .../SOURCES.txt | 146 ++++++ .../dependency_links.txt | 1 + .../entry_points.txt | 3 + .../top_level.txt | 1 + src/simpleimageclassifier/__init__.py | 2 + src/simpleimageclassifier/__main__.py | 6 + .../__pycache__/__init__.cpython-38.pyc | Bin 0 -> 233 bytes .../__pycache__/__main__.cpython-38.pyc | Bin 0 -> 307 bytes .../__pycache__/predictor.cpython-38.pyc | Bin 0 -> 7066 bytes .../simpleimageclassifier.cpython-38.pyc | Bin 0 -> 5213 bytes .../configs/Base-RCNN-C4.yaml | 18 + .../configs/Base-RCNN-DilatedC5.yaml | 31 ++ .../configs/Base-RCNN-FPN.yaml | 42 ++ .../configs/Base-RetinaNet.yaml | 25 ++ .../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml | 17 + .../faster_rcnn_R_101_C4_3x.yaml | 9 + .../faster_rcnn_R_101_DC5_3x.yaml | 9 + .../faster_rcnn_R_101_FPN_3x.yaml | 9 + .../faster_rcnn_R_50_C4_1x.yaml | 6 + .../faster_rcnn_R_50_C4_3x.yaml | 9 + .../faster_rcnn_R_50_DC5_1x.yaml | 6 + .../faster_rcnn_R_50_DC5_3x.yaml | 9 + .../faster_rcnn_R_50_FPN_1x.yaml | 6 + .../faster_rcnn_R_50_FPN_3x.yaml | 9 + .../faster_rcnn_X_101_32x8d_FPN_3x.yaml | 13 + .../COCO-Detection/fcos_R_50_FPN_1x.py | 11 + .../retinanet_R_101_FPN_3x.yaml | 8 + .../COCO-Detection/retinanet_R_50_FPN_1x.py | 11 + .../COCO-Detection/retinanet_R_50_FPN_1x.yaml | 5 + 
.../COCO-Detection/retinanet_R_50_FPN_3x.yaml | 8 + .../COCO-Detection/rpn_R_50_C4_1x.yaml | 10 + .../COCO-Detection/rpn_R_50_FPN_1x.yaml | 9 + .../mask_rcnn_R_101_C4_3x.yaml | 9 + .../mask_rcnn_R_101_DC5_3x.yaml | 9 + .../mask_rcnn_R_101_FPN_3x.yaml | 9 + .../mask_rcnn_R_50_C4_1x.py | 8 + .../mask_rcnn_R_50_C4_1x.yaml | 6 + .../mask_rcnn_R_50_C4_3x.yaml | 9 + .../mask_rcnn_R_50_DC5_1x.yaml | 6 + .../mask_rcnn_R_50_DC5_3x.yaml | 9 + .../mask_rcnn_R_50_FPN_1x.py | 8 + .../mask_rcnn_R_50_FPN_1x.yaml | 6 + .../mask_rcnn_R_50_FPN_1x_giou.yaml | 12 + .../mask_rcnn_R_50_FPN_3x.yaml | 9 + .../mask_rcnn_X_101_32x8d_FPN_3x.yaml | 13 + .../mask_rcnn_regnetx_4gf_dds_fpn_1x.py | 34 ++ .../mask_rcnn_regnety_4gf_dds_fpn_1x.py | 35 ++ .../Base-Keypoint-RCNN-FPN.yaml | 15 + .../keypoint_rcnn_R_101_FPN_3x.yaml | 8 + .../keypoint_rcnn_R_50_FPN_1x.py | 8 + .../keypoint_rcnn_R_50_FPN_1x.yaml | 5 + .../keypoint_rcnn_R_50_FPN_3x.yaml | 8 + .../keypoint_rcnn_X_101_32x8d_FPN_3x.yaml | 12 + .../Base-Panoptic-FPN.yaml | 11 + .../panoptic_fpn_R_101_3x.yaml | 8 + .../panoptic_fpn_R_50_1x.py | 8 + .../panoptic_fpn_R_50_1x.yaml | 5 + .../panoptic_fpn_R_50_3x.yaml | 8 + .../Cityscapes/mask_rcnn_R_50_FPN.yaml | 27 ++ .../configs/Detectron1-Comparisons/README.md | 84 ++++ .../faster_rcnn_R_50_FPN_noaug_1x.yaml | 17 + .../keypoint_rcnn_R_50_FPN_1x.yaml | 27 ++ .../mask_rcnn_R_50_FPN_noaug_1x.yaml | 20 + .../mask_rcnn_R_101_FPN_1x.yaml | 19 + .../mask_rcnn_R_50_FPN_1x.yaml | 19 + .../mask_rcnn_X_101_32x8d_FPN_1x.yaml | 23 + .../mask_rcnn_R_101_FPN_1x.yaml | 22 + .../mask_rcnn_R_50_FPN_1x.yaml | 22 + .../mask_rcnn_X_101_32x8d_FPN_1x.yaml | 26 ++ .../Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml | 12 + .../Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml | 15 + ...sk_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml | 36 ++ .../mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml | 10 + .../mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml | 8 + .../mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml | 11 + .../Misc/mask_rcnn_R_50_FPN_3x_gn.yaml | 21 + 
.../Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml | 24 + .../Misc/mmdet_mask_rcnn_R_50_FPN_1x.py | 152 +++++++ ...anoptic_fpn_R_101_dconv_cascade_gn_3x.yaml | 26 ++ .../scratch_mask_rcnn_R_50_FPN_3x_gn.yaml | 13 + .../scratch_mask_rcnn_R_50_FPN_9x_gn.yaml | 19 + .../scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml | 19 + .../configs/Misc/semantic_R_50_FPN_1x.yaml | 11 + .../configs/Misc/torchvision_imagenet_R_50.py | 150 +++++++ .../faster_rcnn_R_50_C4.yaml | 18 + .../faster_rcnn_R_50_FPN.yaml | 18 + .../configs/common/README.md | 6 + .../configs/common/coco_schedule.py | 47 ++ .../configs/common/data/coco.py | 48 ++ .../configs/common/data/coco_keypoint.py | 13 + .../common/data/coco_panoptic_separated.py | 26 ++ .../configs/common/data/constants.py | 9 + .../configs/common/models/cascade_rcnn.py | 36 ++ .../configs/common/models/fcos.py | 23 + .../common/models/keypoint_rcnn_fpn.py | 33 ++ .../configs/common/models/mask_rcnn_c4.py | 90 ++++ .../configs/common/models/mask_rcnn_fpn.py | 95 ++++ .../configs/common/models/mask_rcnn_vitdet.py | 59 +++ .../configs/common/models/panoptic_fpn.py | 20 + .../configs/common/models/retinanet.py | 55 +++ .../configs/common/optim.py | 28 ++ .../configs/common/train.py | 18 + .../mask_rcnn_R_101_FPN_100ep_LSJ.py | 9 + .../mask_rcnn_R_101_FPN_200ep_LSJ.py | 14 + .../mask_rcnn_R_101_FPN_400ep_LSJ.py | 14 + .../mask_rcnn_R_50_FPN_100ep_LSJ.py | 72 +++ .../mask_rcnn_R_50_FPN_200ep_LSJ.py | 14 + .../mask_rcnn_R_50_FPN_400ep_LSJ.py | 14 + .../mask_rcnn_R_50_FPN_50ep_LSJ.py | 14 + ...mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py | 29 ++ ...mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py | 14 + ...mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py | 14 + ...mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py | 30 ++ ...mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py | 14 + ...mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py | 14 + .../configs/quick_schedules/README.md | 8 + ...mask_rcnn_R_50_FPN_inference_acc_test.yaml | 7 + ...scade_mask_rcnn_R_50_FPN_instant_test.yaml | 11 + 
...fast_rcnn_R_50_FPN_inference_acc_test.yaml | 7 + .../fast_rcnn_R_50_FPN_instant_test.yaml | 15 + ...oint_rcnn_R_50_FPN_inference_acc_test.yaml | 7 + .../keypoint_rcnn_R_50_FPN_instant_test.yaml | 16 + ...R_50_FPN_normalized_training_acc_test.yaml | 30 ++ ...point_rcnn_R_50_FPN_training_acc_test.yaml | 28 ++ .../mask_rcnn_R_50_C4_GCV_instant_test.yaml | 18 + .../mask_rcnn_R_50_C4_inference_acc_test.yaml | 7 + .../mask_rcnn_R_50_C4_instant_test.yaml | 14 + .../mask_rcnn_R_50_C4_training_acc_test.yaml | 22 + ...mask_rcnn_R_50_DC5_inference_acc_test.yaml | 7 + ...mask_rcnn_R_50_FPN_inference_acc_test.yaml | 10 + .../mask_rcnn_R_50_FPN_instant_test.yaml | 14 + ...R_50_FPN_pred_boxes_training_acc_test.yaml | 6 + .../mask_rcnn_R_50_FPN_training_acc_test.yaml | 21 + .../panoptic_fpn_R_50_inference_acc_test.yaml | 7 + .../panoptic_fpn_R_50_instant_test.yaml | 19 + .../panoptic_fpn_R_50_training_acc_test.yaml | 20 + ...retinanet_R_50_FPN_inference_acc_test.yaml | 7 + .../retinanet_R_50_FPN_instant_test.yaml | 13 + .../rpn_R_50_FPN_inference_acc_test.yaml | 7 + .../rpn_R_50_FPN_instant_test.yaml | 13 + .../semantic_R_50_FPN_inference_acc_test.yaml | 10 + .../semantic_R_50_FPN_instant_test.yaml | 18 + .../semantic_R_50_FPN_training_acc_test.yaml | 20 + src/simpleimageclassifier/predictor.py | 219 +++++++++ .../simpleimageclassifier.py | 195 +++++++++ 162 files changed, 4227 insertions(+) create mode 100644 .Dockerfile.swp create mode 100644 .project create mode 100644 .pydevproject create mode 100644 .requirements.txt.swp create mode 100644 .settings/org.eclipse.core.resources.prefs create mode 100644 Dockerfile create mode 100644 FUNDING.md create mode 100644 LICENSE.md create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 canegatto.jpg create mode 100644 dist/simpleimageclassifier-1.0.0.tar.gz create mode 100644 requirements.txt create mode 100644 runexample.txt create mode 100644 setup.py create mode 100644 src/canegatto_out.jpg create mode 
100644 src/simpleimageclassifier.egg-info/PKG-INFO create mode 100644 src/simpleimageclassifier.egg-info/SOURCES.txt create mode 100644 src/simpleimageclassifier.egg-info/dependency_links.txt create mode 100644 src/simpleimageclassifier.egg-info/entry_points.txt create mode 100644 src/simpleimageclassifier.egg-info/top_level.txt create mode 100644 src/simpleimageclassifier/__init__.py create mode 100644 src/simpleimageclassifier/__main__.py create mode 100644 src/simpleimageclassifier/__pycache__/__init__.cpython-38.pyc create mode 100644 src/simpleimageclassifier/__pycache__/__main__.cpython-38.pyc create mode 100644 src/simpleimageclassifier/__pycache__/predictor.cpython-38.pyc create mode 100644 src/simpleimageclassifier/__pycache__/simpleimageclassifier.cpython-38.pyc create mode 100644 src/simpleimageclassifier/configs/Base-RCNN-C4.yaml create mode 100644 src/simpleimageclassifier/configs/Base-RCNN-DilatedC5.yaml create mode 100644 src/simpleimageclassifier/configs/Base-RCNN-FPN.yaml create mode 100644 src/simpleimageclassifier/configs/Base-RetinaNet.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml create mode 100644 
src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/fcos_R_50_FPN_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_C4_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x_giou.yaml create 
mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.py create mode 100644 src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml create mode 100644 src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml create mode 100644 src/simpleimageclassifier/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml create mode 100644 src/simpleimageclassifier/configs/Detectron1-Comparisons/README.md create mode 100644 src/simpleimageclassifier/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml create mode 100644 src/simpleimageclassifier/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml create mode 100644 
src/simpleimageclassifier/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/mmdet_mask_rcnn_R_50_FPN_1x.py create mode 100644 src/simpleimageclassifier/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml create mode 100644 
src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/semantic_R_50_FPN_1x.yaml create mode 100644 src/simpleimageclassifier/configs/Misc/torchvision_imagenet_R_50.py create mode 100644 src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml create mode 100644 src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml create mode 100644 src/simpleimageclassifier/configs/common/README.md create mode 100644 src/simpleimageclassifier/configs/common/coco_schedule.py create mode 100644 src/simpleimageclassifier/configs/common/data/coco.py create mode 100644 src/simpleimageclassifier/configs/common/data/coco_keypoint.py create mode 100644 src/simpleimageclassifier/configs/common/data/coco_panoptic_separated.py create mode 100644 src/simpleimageclassifier/configs/common/data/constants.py create mode 100644 src/simpleimageclassifier/configs/common/models/cascade_rcnn.py create mode 100644 src/simpleimageclassifier/configs/common/models/fcos.py create mode 100644 src/simpleimageclassifier/configs/common/models/keypoint_rcnn_fpn.py create mode 100644 src/simpleimageclassifier/configs/common/models/mask_rcnn_c4.py create mode 100644 src/simpleimageclassifier/configs/common/models/mask_rcnn_fpn.py create mode 100644 src/simpleimageclassifier/configs/common/models/mask_rcnn_vitdet.py create mode 100644 src/simpleimageclassifier/configs/common/models/panoptic_fpn.py create mode 100644 src/simpleimageclassifier/configs/common/models/retinanet.py create mode 100644 src/simpleimageclassifier/configs/common/optim.py create mode 100644 src/simpleimageclassifier/configs/common/train.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ.py create mode 100644 
src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_50ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py create mode 100644 src/simpleimageclassifier/configs/quick_schedules/README.md create mode 100644 src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml create mode 100644 
src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_pred_boxes_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml create mode 100644 
src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml create mode 100644 src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml create mode 100644 src/simpleimageclassifier/predictor.py create mode 100644 src/simpleimageclassifier/simpleimageclassifier.py diff --git a/.Dockerfile.swp b/.Dockerfile.swp new file mode 100644 index 0000000..e69de29 diff --git a/.project b/.project new file mode 100644 index 0000000..a607677 --- /dev/null +++ b/.project @@ -0,0 +1,17 @@ + + + simpleimageclassifier + + + + + + org.python.pydev.PyDevBuilder + + + + + + org.python.pydev.pythonNature + + diff --git a/.pydevproject b/.pydevproject new file mode 100644 index 0000000..aa7a29a --- /dev/null +++ b/.pydevproject @@ -0,0 +1,8 @@ + + + + /${PROJECT_DIR_NAME} + + python interpreter + Default + diff --git a/.requirements.txt.swp b/.requirements.txt.swp new file mode 100644 index 0000000..e69de29 diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 0000000..45d26e5 --- /dev/null +++ b/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,4 @@ +eclipse.preferences.version=1 +encoding//src/simpleimageclassifier/simpleimageclassifier.py=utf-8 +encoding/=UTF-8 +encoding/setup.py=utf-8 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..6b97e2f --- /dev/null +++ b/Dockerfile @@ -0,0 +1,37 @@ +# Base +FROM ubuntu:20.04 + +RUN apt-get update +RUN apt-get upgrade -y +RUN apt-get install -y python3 python3-pip wget + + +# Istall deps +COPY ./requirements.txt / +RUN pip3 install -r requirements.txt + +RUN pip3 install torch==1.10.1+cpu torchvision==0.11.2+cpu torchaudio==0.10.1+cpu -f https://download.pytorch.org/whl/cpu/torch_stable.html +RUN pip3 install detectron2 -f \ + 
https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html + +# Install dist package sortapp +COPY ./dist/simpleimageclassifier-1.0.0.tar.gz / + +RUN pip3 install simpleimageclassifier-1.0.0.tar.gz + +COPY canegatto.jpg / +# +#RUN rm sortapp-1.0.0.tar.gz +#RUN rm requirements.txt +#RUN rm -r /root/.cache + +### Alternative ### +# Create a working directory and Bundle app source +# WORKDIR /simpleimageclassifier +# COPY src/simpleimageclassifier /simpleimageclassifier + +# Copy all subfolder +#ADD . / + +# Autorun +# CMD [ "python3", "./simpleimageclassifier.py" ] diff --git a/FUNDING.md b/FUNDING.md new file mode 100644 index 0000000..6fa9eac --- /dev/null +++ b/FUNDING.md @@ -0,0 +1,26 @@ +# Acknowledgments + +The projects leading to this software have received funding from a series of European Union programmes including: + +- the Sixth Framework Programme for Research and Technological Development + - [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260). +- the Seventh Framework Programme for research, technological development and demonstration + - [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488); + - [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no.239019); + - [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465); + - [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644); + - [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754). +- the H2020 research and innovation programme + - [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024); + - [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119); + - [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142); + - [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182); + - [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 
675680); + - [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610); + - [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001); + - [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194); + - [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914); + - [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091); + - [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650); + - [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409); + - [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042); \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..c5e4f4b --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,311 @@ +# European Union Public Licence V. 1.1 + + +EUPL © the European Community 2007 + + +This European Union Public Licence (the “EUPL”) applies to the Work or Software +(as defined below) which is provided under the terms of this Licence. Any use of +the Work, other than as authorised under this Licence is prohibited (to the +extent such use is covered by a right of the copyright holder of the Work). + +The Original Work is provided under the terms of this Licence when the Licensor +(as defined below) has placed the following notice immediately following the +copyright notice for the Original Work: + +Licensed under the EUPL V.1.1 + +or has expressed by any other mean his willingness to license under the EUPL. + + + +## 1. Definitions + +In this Licence, the following terms have the following meaning: + +- The Licence: this Licence. + +- The Original Work or the Software: the software distributed and/or + communicated by the Licensor under this Licence, available as Source Code and + also as Executable Code as the case may be. 
+ +- Derivative Works: the works or software that could be created by the Licensee, + based upon the Original Work or modifications thereof. This Licence does not + define the extent of modification or dependence on the Original Work required + in order to classify a work as a Derivative Work; this extent is determined by + copyright law applicable in the country mentioned in Article 15. + +- The Work: the Original Work and/or its Derivative Works. + +- The Source Code: the human-readable form of the Work which is the most + convenient for people to study and modify. + +- The Executable Code: any code which has generally been compiled and which is + meant to be interpreted by a computer as a program. + +- The Licensor: the natural or legal person that distributes and/or communicates + the Work under the Licence. + +- Contributor(s): any natural or legal person who modifies the Work under the + Licence, or otherwise contributes to the creation of a Derivative Work. + +- The Licensee or “You”: any natural or legal person who makes any usage of the + Software under the terms of the Licence. + +- Distribution and/or Communication: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, on-line or off-line, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + + + +## 2. 
Scope of the rights granted by the Licence + +The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, +sub-licensable licence to do the following, for the duration of copyright vested +in the Original Work: + +- use the Work in any circumstance and for all usage, reproduce the Work, modify +- the Original Work, and make Derivative Works based upon the Work, communicate +- to the public, including the right to make available or display the Work or +- copies thereof to the public and perform publicly, as the case may be, the +- Work, distribute the Work or copies thereof, lend and rent the Work or copies +- thereof, sub-license rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make effective +the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non exclusive usage rights to +any patents held by the Licensor, to the extent necessary to make use of the +rights granted on the Work under this Licence. + + + +## 3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as +Executable Code. If the Work is provided as Executable Code, the Licensor +provides in addition a machine-readable copy of the Source Code of the Work +along with each copy of the Work that the Licensor distributes or indicates, in +a notice following the copyright notice attached to the Work, a repository where +the Source Code is easily and freely accessible for as long as the Licensor +continues to distribute and/or communicate the Work. + + + +## 4. 
Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits from +any exception or limitation to the exclusive rights of the rights owners in the +Original Work or Software, of the exhaustion of those rights or of other +applicable limitations thereto. + + + +## 5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: the Licensee shall keep intact all copyright, patent or +trademarks notices and all notices that refer to the Licence and to the +disclaimer of warranties. The Licensee must include a copy of such notices and a +copy of the Licence with every copy of the Work he/she distributes and/or +communicates. The Licensee must cause any Derivative Work to carry prominent +notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes and/or communicates copies of the +Original Works or Derivative Works based upon the Original Work, this +Distribution and/or Communication will be done under the terms of this Licence +or of a later version of this Licence unless the Original Work is expressly +distributed only under this version of the Licence. The Licensee (becoming +Licensor) cannot offer or impose any additional terms or conditions on the Work +or Derivative Work that alter or restrict the terms of the Licence. + +Compatibility clause: If the Licensee Distributes and/or Communicates Derivative +Works or copies thereof based upon both the Original Work and another work +licensed under a Compatible Licence, this Distribution and/or Communication can +be done under the terms of this Compatible Licence. For the sake of this clause, +“Compatible Licence” refers to the licences listed in the appendix attached to +this Licence. 
Should the Licensee’s obligations under the Compatible Licence +conflict with his/her obligations under this Licence, the obligations of the +Compatible Licence shall prevail. + +Provision of Source Code: When distributing and/or communicating copies of the +Work, the Licensee will provide a machine-readable copy of the Source Code or +indicate a repository where this Source will be easily and freely available for +as long as the Licensee continues to distribute and/or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade names, +trademarks, service marks, or names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + + + +## 6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted +hereunder is owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she brings +to the Work are owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under the +terms of this Licence. + + + +## 7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +contributors. It is not a finished work and may therefore contain defects or +“bugs” inherent to this type of software development. 
+ +For the above reason, the Work is provided under the Licence on an “as is” basis +and without warranties of any kind concerning the Work, including without +limitation merchantability, fitness for a particular purpose, absence of defects +or errors, accuracy, non-infringement of intellectual property rights other than +copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a condition +for the grant of any rights to the Work. + + + +## 8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural +persons, the Licensor will in no event be liable for any direct or indirect, +material or moral, damages of any kind, arising out of the Licence or of the use +of the Work, including without limitation, damages for loss of goodwill, work +stoppage, computer failure or malfunction, loss of data or any commercial +damage, even if the Licensor has been advised of the possibility of such +damage. However, the Licensor will be liable under statutory product liability +laws as far such laws apply to the Work. + + + +## 9. Additional agreements + +While distributing the Original Work or Derivative Works, You may choose to +conclude an additional agreement to offer, and charge a fee for, acceptance of +support, warranty, indemnity, or other liability obligations and/or services +consistent with this Licence. However, in accepting such obligations, You may +act only on your own behalf and on your sole responsibility, not on behalf of +the original Licensor or any other Contributor, and only if You agree to +indemnify, defend, and hold each Contributor harmless for any liability incurred +by, or claims asserted against such Contributor by the fact You have accepted +any such warranty or additional liability. + + + +## 10. 
Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon “I agree” +placed under the bottom of a window displaying the text of this Licence or by +affirming consent in any other similar way, in accordance with the rules of +applicable law. Clicking on that icon indicates your clear and irrevocable +acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and +conditions by exercising any rights granted to You by Article 2 of this Licence, +such as the use of the Work, the creation by You of a Derivative Work or the +Distribution and/or Communication by You of the Work or copies thereof. + + + +## 11. Information to the public + +In case of any Distribution and/or Communication of the Work by means of +electronic communication by You (for example, by offering to download the Work +from a remote location) the distribution channel or media (for example, a +website) must at least provide to the public the information requested by the +applicable law regarding the Licensor, the Licence and the way it may be +accessible, concluded, stored and reproduced by the Licensee. + + + +## 12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon +any breach by the Licensee of the terms of the Licence. + +Such a termination will not terminate the licences of any person who has +received the Work from the Licensee under the Licence, provided such persons +remain in full compliance with the Licence. + + + +## 13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work licensed hereunder. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. 
Such provision will be construed and/or reformed so as necessary to make +it valid and enforceable. + +The European Commission may publish other linguistic versions and/or new +versions of this Licence, so far this is required and reasonable, without +reducing the scope of the rights granted by the Licence. New versions of the +Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version of +their choice. + + + +## 14. Jurisdiction + +Any litigation resulting from the interpretation of this License, arising +between the European Commission, as a Licensor, and any Licensee, will be +subject to the jurisdiction of the Court of Justice of the European Communities, +as laid down in article 238 of the Treaty establishing the European Community. + +Any litigation arising between Parties, other than the European Commission, and +resulting from the interpretation of this License, will be subject to the +exclusive jurisdiction of the competent court where the Licensor resides or +conducts its primary business. + + + +## 15. Applicable Law + +This Licence shall be governed by the law of the European Union country where +the Licensor resides or has his registered office. + +This licence shall be governed by the Belgian law if: + +- a litigation arises between the European Commission, as a Licensor, and any +- Licensee; the Licensor, other than the European Commission, has no residence +- or registered office inside a European Union country. + + + +## Appendix + + + +“Compatible Licences” according to article 5 EUPL are: + + +- GNU General Public License (GNU GPL) v. 2 + +- Open Software License (OSL) v. 2.1, v. 3.0 + +- Common Public License v. 1.0 + +- Eclipse Public License v. 1.0 + +- Cecill v. 
2.0 diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..93c36a7 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,3 @@ +include README.md +include LICENSE.md +recursive-include src/simpleimageclassifier/ * \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..3abdcb5 --- /dev/null +++ b/README.md @@ -0,0 +1,96 @@ +# SimpleImageClassifier + +SimpleImageClassifier is a simple example that allows you to classify a JPEG image given as input. +This example is based on [Detectron2](https://github.com/facebookresearch/detectron2 ), a Facebook AI Research project. +Starting from this example, you can first create an installable package via pip3 and then a Docker image in which the created package is installed. +The package declares the simpleimageclassifier command as its entrypoint. +So once the package is installed you can use this command at the command line. +Also, you can run it as a module, for example: + +``` +$ cd src +$ python3 -m simpleimageclassifier --config-file simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input ../canegatto.jpg --output canegatto_out.jpg --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl + +``` +The image created in this way can be executed in a container with the following command, using a jpg file placed in it: + +``` +docker run -i -t --rm --name simpleimageclassifier-cont simpleimageclassifier simpleimageclassifier --config-file /usr/local/lib/python3.8/dist-packages/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input --output --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl +``` +You can also run it directly from the container shell: + +``` +$ docker run -i -t --rm --name simpleimageclassifier-cont simpleimageclassifier bash + +root@7f371ac6f420:/# 
simpleimageclassifier --config-file /usr/local/lib/python3.8/dist-packages/detectron2/model_zoo/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input canegatto.jpg --output canegatto_out.jpg --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl + +``` +Note that the model set by --config-file can be taken from the local detectron2 library in this case. + + +To be able to create an image from this application you need to have Docker and Docker-Compose installed on your machine, along with the related Python packages, see: +[Docker](https://docs.docker.com/engine/), +[Docker-Compose](https://docs.docker.com/compose/install/) and +[Docker Package for Python](https://pypi.org/project/docker/). + +## Useful Commands + +### Create Distribution Package +``` +python3 setup.py sdist --formats=gztar +``` +### Create Docker Image +``` +docker build -t simpleimageclassifier . +``` + +### Save Docker Image in a file +``` +docker save simpleimageclassifier | gzip > simpleimageclassifier.tar.gz +``` + +### Publish Docker Image on DockerHub +Re-tagging an existing local image: + +``` +docker tag simpleimageclassifier /[:] +``` + +Log in to DockerHub (use your Docker ID): + +``` +docker login +``` + +Now you can push this repository to the registry designated by its name or tag: + +``` +docker push /: +``` + +Then log out for security: + +``` +docker logout +``` + + +## Authors + +* **Giancarlo Panichi** ([ORCID](http://orcid.org/0000-0001-8375-6644)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience) + + + +## License + +This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details. 
+ + +## About the gCube Framework +This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an +open-source software toolkit used for building and operating Hybrid Data +Infrastructures enabling the dynamic deployment of Virtual Research Environments +by favouring the realisation of reuse oriented policies. + +The projects leading to this software have received funding from a series of European Union programmes see [FUNDING.md](FUNDING.md) + diff --git a/canegatto.jpg b/canegatto.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ea991e9a5edae4e120468fab56403c3413153ebd GIT binary patch literal 44274 zcmb@t1z21`wl3O0kU)SC+yVgt2^!oj1P>04L!j}-8+QpISmO{NNRXfn!5a@j8h3}r z-QDFeGk4~md(J!W&O0+}f4x_IRl90e@7jB~Q|2&>C9|u51BY1-Tit~jehMEZ-A*K^o;Cp(a*dN4L<*n-6 z45rS*OF>d3aj%am{%AlxkP-Z4Tmaf%y8VSc-Wr7f_0N#g zf4PDHjQ}7HxX$}ubNRn9%+%Va50<2 ztoHE6Q_e45P)5`oP`L=*sahvxB2{eV`Ja}&2C*nhWNjM4j>|0FUlB|pzYJJkt3Myr zIFp3)Y~pSH$GJ$Zd_~P2a%F18>~G=nK{&;BsmxB1M*@NSoO1)nXV9qtA40v@wl$uZ zn^MNyZLg9YkF3>~T^{8#c;Ut5)2NFw_bgIYmphFI4G#6YWm&$jT%v4uH`n1q)7sa8}NWt?8YDCR;X~$)8WG`DctYK}KWq&E>Tx({2e*h3|ef zv?V~-q6S^{?|1&!xG?Qi^3Bhlu*#`jU9mJP>bEoxfVcS%fIPL6)d0DlBylv@wARE`IrS7v?A5+Vds7FJ^hz+S zqyk=*5n!8F`n{vUn-N%DoIb>EKl%mCIoy=n;^WN?w1R zjKY+z8!^&RiXCn+1g5xW28ihh%zir*eA}|9wDq~cf|zV4c}xjnFtnap6&@rMRE)3s z{BPuFyf54I%r>INE^paj&~FpJEHD37&CF-kffL`!X>PlG+{M-dz8ZtcSm;F=EcUI#W#Xg-)Un-MddFc%Up%HkE z{jSfj-T0w(p>9jDKaHczM0pg*Zz2G5h)uBQU;A+X(OZ~y_}+NWsMlytsk?hlz?L-SqC(`G|aa-q;WK~*OCdp5J*~rfkesl3^2H8qH-q2O+ z{kslYFBtkmog1{*$8OQf8U5wHf9XhJXb;y)4n`_(Fx~MRRk_W%h_0-m7|l%4o_%T( zcLrA>sUxoXwzlW4&b)1Q&Dn6cie|ZTzqy*y`vgcbGb7kyd(wBEDmTnwSqE-O1E9rO4 z6X8)D?(>1g!oSJsZD=cD$M?FzZ{6AQDh)-(*u{i1U$Y%=oxp}HVC@6ueQX2`cXgQ3 z>0YFaEdh80jiL!ideWrg?@sU_nW5)icO*H~e!OIy8uIjc@plC^IBs>Ing@XGWwAGO zqps28Y^I{zQ&Y_X=;bRiUdvyA^=~0eGm=%18ED0?B!&d;N$@^+#rT^yGyB3mxYw%f zGZ!H%p5vTAoK=ao5TaR!y)8rzQ>`Mg*+l 
zAGs5IK^(eku--VK(lbs0c2`HirI2)*^I)M*b_G&QfWstAmWOr3Xt$~yv~z85Px4+fTIqvdhot|wp^x6rPtmI$_&|REFo`{YthwdZ z=Rx{ir^zaeYP17Z)cN$|$+ovSgn4{Ezjz_EpYRFP-zZ+d=wiRMSP6|Ud+NV!UV^u3 zAX802;qm$V(g6z^cUUl+-2tDrkSdG1jqEs(MWJ!8+t*2vvZ;Ui&5az;-~$8qpgCvA zYHirhQ~kzmh<-EIB65m1yKH&HGchz+d@1U*#(;bN&t; zuCq?7GYb>7u`u=>geDE|9Egqw9<4&*QKAp+&#d8YZnyi|T!pj{Iz%!K`kf;plw5wu zQa6-o;VW)u6CWAP1HVPVOy?pU<|L)+UPeelIc9TAmk67 zYO5WVe8)ASSq;PB%u@+}W({#ZhCJQMBObQAZX?rEtS&9W%2*s2Ska$gt=iTu%UJ!Iu9JQp`p#Ij^c6_}1{{ZIy0xCqs zWm$i-;*Y3vK0jU|eKir7LA^jKEjXZR?>2~Oy*2Uu=4$vf8OpQT@DT36@=FZ91uH%DvMO4 zg!rgFRg?;&>9BCw3S(6kOm;MoW|4sC1JgHw)xGeU2S9DHVuW!_-WQqYn9Kw<9X2W) z;#y~NhGbp=|0W7TYc84foYB>{#I8ORAkyym)#e$|>@u;({#m3}g|6=O;KY^0gpF&e zi;4boUSf>LW+h80Ph>~PfRMA4e2pJ;)NH&nEiI)Si4i}*U7CTT?$0i|@Mg=5%9k#2 zcC#Y#yKjJZ+q}f5vB{#b4V@+s-w9u@Qzz$pA>5=U9m$2&OvsMt3+M9BS9(W_z?5QJ z_Y#*e+TPyw%k{7DF|l{+iF6Y<21U>Ox3PDFh2+Wq#8xG{Fw2c^aR;{4LAlYgoj#2b zuIH;IEr4GC{uBe}4bXg-n5h;WcV|^s-Z};YvK}8G)cZ_KD_DO)f(LI+ZG2F)dFv)& zO8oUDucN0yXh_5mYTrh$_U0Eix8sSWcC8=RBlXiPk3zmBb+cjs-t1FPL? 
z3+)XW5t1NBgTL_2a=n+cV=?++GVe;@+wr}l8QHpzSHNSH9mlpU%h;_*RO`T5JF>EM zjN=B^U!&Zw8+F88lTtX<`(Af8YEoTO9mkGp!RO@#Dor$`PoHQ?`qkC(mTPNCnmB|= zL#^L~=SW%yVn8jSMoMqZ;V?!!?OjR9;IV5y5m*+Ua5(--uP)^7xxts0_PC50)^gKM zQxjdCLz=bjE;{L+E`v||0^|O43cywzTHX||I}SG+YZbb37A?$Nd{82Nfe>cP<%Mcv z@e>xx zTodDuHOvcn2j=XMKW2Q#k+r6jtPps8;NuCajM+`tJz+e*yLXIrG>XaunYuE#GH=TRMVPXywrCd)-#~oURK^dzPLHO>}oE z7UDDBYm9>^&m}Fm@apT<>`z*eB)NzETY)T@l&)ew>U1)|+D9Rx?Jx(jQ+7k0Fy*d2 z+i1x8G_)$c+~B#v%L|M^6|Pc>xB+SZ=mt0Xx3Y%%!qKnFXmR;*#G^clHFObdg+6pq zG~X^SSryl855I;|J!MZA(pD+jT#s-4)N!tkg^te@sBo1>z0lIkRvlkqm-~f|{nKZ= z(wThv4e;z!J0=%r=<9}{f_Ae=vsMJ~OuYnFi{@?9`4Vs43$+gPGgmnDZM47rv;@OX z?KGq(F%QM{+s7TJy&rGiaYWBWi|ZOEADpESZI|usE)Nf?II;g0XXe#{xyO#MyRMQm zlAS`9e&h!z`h0SP%W2>5{XJP{1t}k&XPgz`PeEV$=?4J7^l3Ex+y|txuTu3b>>4K5 zUs@f}2I2(%nWeVxwUS-^^t1P4r9jpx_P?s;|9Ahx@T^J**umBQk{x9GtLx3>95a?= z&kf?nb$7Rynolqqfux8d15<__p-Xl{7G^`;K{!!g&3Ea3!m{WuOOW=U7`Rz-jx`RJ=?-w42(}5^V@Xt2z$*Hq)<+$!Q+|&ofV_CT};k zFU{Bbvfb;BMO)HW`&v)moV&NqW-S{f&bbUTZ!fV6()jau9=xd`KI3G6ACx^TY!c-5YlVh786Es zKD-RmQ?k_I>!@NkVX$G&i|QF{;*2nM4GweyX!J8!7WGH{5%&1u4OeUYzUCSN6x6rq z`Cyu5>oQjOX^O4+{eZ3lJp#^crq!akpf~Wh7YQKKmZnB=c-~cV}voeT^%K zx3DOqV6?Pp(06_X1&=XnUo3pw5Fv+ETAvy`EGhU5`sJ?ijsxO}%KHTOO}t8L<2qj;`o%&$u)VEPN##p;n4tZ~i71r3eYad_I3p+0ksjp>R}ZRW9w_RBGQ_p1aP0v4}f~lRCRfAP#BU$jr*HVJRcg7 z$#=>fPl`C@EiX#~lIT?7^AuNWp^Oj(Caz?7*PlSFH|+Wytb@dr;*1VNYp;+7Vwi)}vzi)!5~()8vZ-n=c&N&6QuBH3gerQ(~W z-jpYHTV2B#Y?)5W4!E!2*1SLOROOu|_f-G>x_WkHO$w(qvA*kvpflFx=LKwv6$nm9 zt$`<;Gvvw9B+F`4j7}{{g^DOfX08omTh*Pee*( zE6z$utZ4K2*-oBSxXE6N>E564QvHc`X^c>1UsYM-kBB30Dn-N`URR>UA2&h7h;C_? 
zP1TzEu)$3nfAZGh?BI5Fk7#|k8u#T6bXRlUf&xiRY58=%TyTr{eMhv; zKkoi>_g@C@9H z&8KwP)C&Bx3d7d;magtsvHk1Hxtj`uh9HA>H{kg3^iyL2$hN$>WTDRSM7_iLXnbsK z3ad8~SQohK5K3Hk+2IS|(|RfFa3pzLdx17Jyj)q<)k;bU-XI;q26&ZN09!y2^A(JL z^s%Tiivu?6xx!7Lq-c7L_CfN|K{8dHzS0)0uWYFP7%W?>$e$P#=fCxFt==1@0-2=h zc~K70=Zol7$L~>2Pcft=Hz;Is&=zYa)rvyjcN%fMs~6hRA{>q&6w1H(6P3btf_JoRQn0RfwZc>8ABE>wkl||4HczzX{NIz?$A3s^)k&Y5N9y z&~z)1IojMel8uS2min zGREoG=j2({Lv*%IW2UW>p;K9-e(Jk7Mr(WE2>0NduM}p1D~=M9hkK{av-9!yZL=pX#x6gdkHL&^%-Dx zrY7onl~z0dwyMnPO;BeiIfHkH4qxe3@R!?=d5+#LW7gnv$M{a>p+ajYKcCDsc*975 z=9BPeTQqH6NcD)yGlMo5W3k@0rp-W}O!mtm{~j|o%wQ!s;apbtvVf144IZ6JAK00; zq(_s4d>Q!kK1oU~gJ#x+f$--CKO~Q725E>4-R3{6vFVv1%ose;1K{Ot>eP~1((_3K-j!Y&+>CTgbW$a7lb0qPSWKg&G zp)pqBRk?^-fl%Hh1PqIrtZqP?+fc2LKv#R#`}GAEC(KR)y&v<#pGzmB^S;?RGx4R- zV4hW>#KM>U`Fa6gmSvp)FJx7U-X&Lk4Grc5>{F=vO5WA$wsUH`GpL(N&`39vZA}*z z&Hl~J^LNel|DSuThw@bIO5-mFuQl&iz=OqO!(v0Lln}DmY9#6*i+5}#*PBAoiiwbW*nO;ba3Akh{Bnh2k z)|c(=kyEKRM4Vccjv*a%I@|2#+#Prz$Uf|NtwP;m<-tL=wqlk5qS8XEN=POg+zU!v zkZ{+SinL93ns)@J3kOT&xM~1-eKJhJ#qzrJ$nF_Uhxb2|1l?U$>=z*t-@es8xqO}3 zbE1ZNE(bG9TKfH=2P;K^rl>cYapHV_`~C6&LxZfV+GrXj9!u>k!yHoCnIim>g`Qg5 z`BEI6`lso#qqHu8RetK$x3&7cADYjfgN&y2<|SV@jj?8sqkwZ_Fj3zws`_vr^RXic z<}PX&@vUKrYmGc8d7TM3FZCu0T}|xu7|rtfgrazFwaz=v5^Z{VaEc4|R+#2uTVcZ3 ziD~0D&a>1rzJp-7oEUg?RYijbYrPk6cE*Eh7k2}~-B!CO&#Y~n89lK$>k}9qu|`W! 
z-{;ySlb*3VF*nbpAF&J?JA}Eu22Xe1!mOy*Ou*5N(3$>_0DKK-ZZXx(1_3a(f-z-{ zVRR2`!=9V7ao=9ju^6mJgRYZVmV1xVSHqexq2f=XNXCX~6Rg0}ZtIyjt07%K8#NZc?TBqoPBm zVGM}_O=MmS^@&y3n3Y589tyr!%8lxdHBDRM|GuJa9@_MmYpYpf~B(jovOp_RSb!4B7mA=RDVJLC$_xN47+X`5g1>u@1t* z=*7(Z{x*}01XxMYD}kEhc`sk@fOfEQI_fBU>=E9FUS#sS_akggIXjz5Bx@k*65Apy zthXc8l1B>{G_x0E_xo|{kC7f->uqaV;;~({iMMy@GkZa<6zer&$i0tM(_|L5%4W9S z#)Ek%MQh{kE79!fPbNA07Ul6NUetl789IG*^?zES%WaotlxuaW@9XsX4KJs9wKD{s z82uU9uF6zX`)Q$H=~M@Nn(z3vbEfWEJO3yu@w+byEQ<<{fc zGb%xXQo-3Rpx}EwsjWp`hdG;h)LPls>Q#>&tsZcx>_k&tYHh#ir{=)lVHAP^ZjN&t zvit^K-{NuP{-S~(2@}d=Eer_4+j&a^Uo@-@>W6h5FEC)8))$e*1zqz8P+0-1)5uLf zd;TA$K;5MEH;97p%eOz-q+^3*4`KKp#ygEu3;rpHvA96rYCmw?WyGbYFj1Q(8zWqD zI>~|KwjD5wWqsiO8c!uI=C!si|Fxu z_@GFn?Y!yHND-5rTRvL9Fl2Rb#-D{%vQtU9SBTb~*a@?ql~zM%+0Wy8-?hbtg-0)G zR0OQE7F6z8ac_mXT)c?Y#+78)yL=0ct10jk7bpQU*;XfD4;^R>b7q<+FL9Kz(>rC( zL{^bjd3^4y0hRhlT-rgN)jlDoCNEB}%i65&C24S+wu?oR3fLp0iFtf>ZUl8gbup;~OaPI)zw-z(T0aWkgda%j~au$Gh~=O$PCV^l-?$ff0Oa zzg#rh3PMFSTHrWUrZ@om+slKVR;g&M60Csd?wW+#;N#~N(leQ#A;K={&)`=R_}*{m zCu~j7+?@etO)oVB1E+_1d)L;E_@l!h<==Y4x3$I)(C^T&g1k|&M5UMH8P?J~>J~9z z&0!5o|8X8*Z~8Xzu)d}Bc0n=n6J=p`vOy=hP3k*)EPlEiVw?LOI?#b^>YN_A zVZ&obZP?Ax_$mGXp!a+4_Tk*y5m<^pddXP*BC;IMVgKdGbpYtoY&Q^Wx?8LdKOwo0 zp{K0FuCK}hayJ_ub8lM1TyPW$QPAtw%s9oY@yS-KA>X|K%)!h42@Rvy4*-`{Gw(Y6 zxXTr4QHB5@Sgm)B$a$1K=Lj@=C%Wq>QhX0iRxw5#iTlzf(;f4Gr2z)uX;RSnd1;fX zw}|vmgVOT}aqHQqTsL1Jz)90H!==79`Y?hXg|xk5 zQ+68e5UDEq?L2#i1GE;Wy-w!CCEyh+Zw4ik{d`?K*Mtqx5s?mSZeWN#|J#x)o0km;_!hLA```>5=$J$g(+-mFd+7|l*ob;5=6 zwc7Gq)_k}dES~OD>nee)CmP}xzk7mqoa#ttrODPsYfcS~{5fQbOf~N6jv#~cH!R*+X)XR#3Xpivhw=1Pz;b-F$1@O)!VE9hf+z&c2BNeI~ zb4ejBB|8g!;pq@TtQ3{r$?d{|wC@fTi#Rje4*+6Lcl~Bs4QfE1C`+z$NxS6VKB@n& zGdh?C@Pcr+Fk!D2I$V(W@dUglk$;+^G@3*Y*CteVQ@ za$6-x`l&)#b7aO8@)aSb)1^ve~-v z2MLFQ$rN{E?4n5bbI+t*l|Brb?)9G>6<&Y3queVf4EL~Uib^WVD-9)mW`0#ScA)c* z6VKQ(K=$Y%wY0W1C7+*Ajc(xGN7USyg5}a50DM=_s05+vT-5{`LL)=L3p`I=hSY={ zf`j7X<+{yHe<}lS&Zg?Ip<#US zlZ`qItIZ^64;du-|(kQfwfc#2W?RMQqOVGxY6rPIxPa> z!eu$$!i}sq;r$&6=3^4x!`i)$Oj=u0Lx 
z7X_A;C7PN1tc9G=^Vi@2K~qV}$BXzMLmr+fD%vJYI@KuV%Z<2g<|cYos36p^l3I}X~a@~T*vFH@e2mbhNWxpqjg?}b;`sQ@-ULW{+x+Pie4+${5iJqO%9 zJcI2Yo;q3d{#g>U;)@&aP6P7OT(cK54yqwv(+a8j$+e%eNEKvrUPiNBm=y99@*W?n z(Y04PC1yLabpGbQ<8wOq0sah>#+GxvuhYI=Bp*$sPvoeTXCayz!v`kx zQ@U)Oc&G#Ly>%Vd=B=?q&>BH&)V#^Wb zvAFSaxCvwFVQ-Qu>~^Q0o(5nBOv}HR@TTC&*U~jcGb7$a_aPt>Y`?>1>7hMtRKN)~ z)G}8L)gobSM8(P%zh0o5*oGfW6vaqfm?aeQ9J^%_{ zMPnF~xz(AJj=eG0oQsP1ikqIB9`H6SGC*?7Hu%xNO~Ssmg!5Gt8&_HWE{8rhpFl&4 z`IT9EV1?oj_A$c63Z7l$DPi3n{+*%J+uG#_3QOmuhSVJ*`l|8c9{R8bV6mt{>Q>)O zSm<&jdM(CAMQp35K(SKWWkrsYR@>U?PQ%L4;zwXufjl$dn+__)_3=Sm2trcKBJ7XdX z`u)C|eb318)_Pwx0*!@LO3M8R79!f@7a|0u%42Pd+%I}F4+K|h{#2@r7K%xrr_7z7 zxon$_(S}x4h;F$?c2u>zxWfANZS(li&2Zo#va6CXn^=E-&^b+M+p)@b+%OXutI!$e zdLgLs0O%<{I#@_|*cpVm=!t5Zk~JQ2A1|ECT1Vmxy03Fvf_elMqhKo z$ouYd3goucsL_6v`gSUexDx3{P(n9L*-k>cY7TW(0)KrYcY`}_t!ZV{cjo#;$9?Y= zo>P*4E&fI4gQm@|H>_`9lPmfT?hF^g$AQw+_w2)80=&~Sv_iAy6RM_pR%y0$e?UY^13ey8urnr&bt zD@mBHLKnI%bp6Qy#1spY7NU2Gc6c(F4g(A&FxU&k*zO^F_YdEG>I*WtNm3-w`8xT# z)Y;Sibk86A;&W8PeU$JH(Za6w4-~hh9)vAU&R-*@J(khr-_Y6{%FpRBd$9s11m?T- zDx&0?Nlku*XEnlO3()Z`9HUx-xZo36aBSk8M+ry&d-Ren5S<;6fFZ2@ynJWiYphul zaTo+2{s!G3yyGsRF&Ie;aLHzeb?>%&HcNK-8GVhI0v9kgq9kP9_O!zitz-W?Xo7*E z!D0=qPFi;d&^geNIH)VZApK>j6~@f#Tz7+B6CXE!u%9o1TyiM1Ll4e(L_}LS67F>$ z0M)*i>lcE9b0YBlM)CW^tpR$9of?SS$_1Y`9st*BMQd~AhT7?R&CloTa?DwGe>rIB zQ0y28xBd+uy3?+47fLH7kCbZ|t*3Vko%2gYMLZ{Efn2#f04_7y+%4VpR!i2utR|;W z2=s2-QCT(I`ZYWC?jSQd2+UvKW5t;!c*p+B(z&)JoGD+fEO zTJv9MJSLw6KLwrn@}ig%2x{LraGv0r6rd)mewbZZ3jFEsFrTnJ;)cb=4dtBHomXv| zI9S&~2A*8AUht<;i}_lr_1&BAek_A5?>QXd-L>Vso77ExyL%?nv7|LCyL=dfk4B)26Zh_&Jpjsdd;MRET1+;Dac>aT=sZzt>S!IH zjC0Apc9v)GxIS5nPeqay~&o589#OW@T&)azeDEejGuO8ir9C{6N2=-BUaz0 z6xVz8d&b|7nIBYD=( zq6%lXVn*Lr2G0*>a;e{@rKQs_?^A3GMKlaXfoVJS%sy6%_6OEW6HFd4V~F!14-x3ulr^2}Jl`37o?y$4IRQ9{>u81(yp< z-|L#|GQ0~FR}+#25zfjx@j2S*!6~lpLhRosg-HbBEBe1)%+g}-W^Ia-fb7lck{n0( zQ0fyGR(Cum318#q0s~D&c`K#K;$$ik0c@OwA8kQ%Y_2F%sMYl8C7IJn21B)mGn#{p zaGHUsLeTyji)VD0{A2rxdZnV;oT)2QBJ>!+zCaz_ILjWXP~>t!;{<8r3sHNDf>D#! 
z>3TjPPt3d=Sq2B~gWQxkYa=|K0wn{(8lUo5sV&OjVS<|T3=2i7@x(S28he?sD{aj= z8Uww@^c$;u%UKpF#87x?dAI9h5{#tH+MFq~8u?yL9ulapMZ@+b5M4RiaG02{fc>3S z5bU{^@>gnAuNl|Ppk#t!bbyVVq}nuk_V8s^?Q5ATX_LOm=K9g@Ck9i+VT>1yak0@g zwEWkFQQjWrrY7g?sU86{yCgZp)lm{+EFYWx5p@3_3U5Tp4kZeGP@2Y3~vA3jR_XcOzTZoC1OQ{*1w5%`aq%El~(A;&U7ds_6O3 zswn4g zTn8YPiy(Y?cD5@~EnBXV=0J|&k>l&omA=fgm&3m@s#z_oR@VDTi#Oav?)qR+cA<}` zk&`{M>&vBmrA?=J4gRn+kPcu~B>GHE^K2UP2U0u^yH0gy95XlQx!1^ZoV40-tD1{p z%t#NW&7gcZUOgi=THrV)^f9Uh#}wm?_l=cZ>sVIJW4K1@Qf&~)&$RQ98l(SWrq@FK zfm*<7Fh8UJtKL(e3^hL%Tc2rr!0y({u>ZJ(#JPEv^m&F_exqC2u&3Akf^h8C%5_bD z2|IHCc3)YYeVf9EPyg!94%G5JYCLH-17Luh%sFd2D%%j&d;863Se9R}jOkpA5Ps(4 zH^IYlJ{XaZ!4TkX%v6>P^Ce%X0_#Su65`wae!jBv7O`!NE3-jNRVg?u9E?|Gzvi6> zS+iM-m&Bou%P9ue?+wb286wuWfVm_Bshi{@5oPMSYn^?^X?4;Lzl`)3L ziA-ifMLD87why*(h>4LI%EKe1d~sEzE=bpnLNJMkulp>Nqg@}qugBcvx?ITf?Twa8{mGM5Gl1#CPNs zp5^tRO7<5KINT>TZsG`cXZ5pTulbzSAv z_WQH7)#OwHlHZKXTsQTc1mS{trS3D5+ZhG3V&cu=7byhKCkW)_ZlY81q8&&=&ejLA zqEH{h)MJ{p1^b_YT&yVknAtv1CJT9~@ahd#hUI~mzy7K3>8AX?g|4M;hjmjHr^)`otN) z9gDL@?h>#p%D$~Zo=k8d7!mLXVUJ`PSJ9=oa9h}$Y@l`mK|ZICL#X#3gT80{4%X{$ zzu#xB&7y9vpP`_0xRb;MKN%lj-v&zm9w(JRrAr0~W%vB4Zw!B~I7fl8no6%Wa8tIS zJo8)r#@mf5aT3_+;xHBhZ9o|L@!kL6%$$QVF0cKX*Ap6siX}IgoG~k(wOkG!Z^8Zc zJJ#${ob0%b2|M)GQ$q?zqYX?YR&#I*^IwLDrgYS-p@j_r_%yV~!+gJoGma0Ub&H1e z>h$^Khqiq%gU8l;q-)I7t0YyY_}68OLnSF;=H7V6d{rvu4}jzw`}DG7OX~|NgYpZh zIT{xS9>-habg-z;uAQoZ?Az0ZxY2ZTdY;z=78>Iit_gqS8vl%uzxvN9vXV>Ut>|@m zW@;%^RukI_2~n1HH}k#ZR)f=ZwWJ!pQJ5yn+Gz)Or5&`On5FNwn?Nubg%EqGg5_r+CjNFN4b$GW9Cvm`!PIl=twLkrgA?y&!#dJO zWN)|V6U7)ZgIC;UAu2XA&N&%0za7o61JlkA?>N$DDOc)N=w8dw8&(}3(Jd~*pf0KB zJ^e)khb`prC+l72SLB4x7^L@EhIGbsF8y$bA~g=>>o233ZCU2JiC%tYcz5E!n!XE_ z9BCcefsq_M0#|+VjlC7hea8M8x978bjiy#srPu8HtW}RPq-hVNnEp#Lf3?&EUKbOu zM@>{sQDdsU@@Sc@hPX1*>zm*1)pHq@mzTFV6icmV^}+|IzFp2LOc&=yT9jfw0GZzQmspUagPjpXINB(S(5$c>I ze0pwo*L5K_^#p%0a<*7o6cL(}6Q>LNm|%rxf|9#y6Xx13%NCzxitKz}DST8_9z^cT&_9TisBXHqH~+EVNl-;A$MUobfUF z70U8&e7M^yhlJQs;TzUIZL3xLd26}v!)K1!z?>z6x%KUBdF5j{L*qh&2SC!U)BqX} 
z+L3k-P?%qZ_wzaIn5F1k;aWxXVqnx&J<-a| z(fYPyW%;#(TPnw6dC>>&>-tsn>iuZ^)0Dbbxe`}T!;k{$K%365)1dWM(??UJi<~Bl>=f_+4o~WYqGrZWh2k|Y4 zG2V&dq2)6a4AB+%ueUNh7C)P#UH-bJo0nbkK z)xcKK#+@G%1T5~5w^4=#MZS=VE$?12J4bR_Hjxc!wQEa5g$J6zRcQmw&3kiZ<+>7E zFAWSj>s!>cc4L>ff2Ox)e=hpdo{Wo*UglI*rm0Bkbb{in=XDBv>R9!k0<>|Qd+1Fd zHCTd~{yt&mQJ}?eb;c=_<;DkqN>ygG?BH6d6n?+&;GOgYv~CS;kYo&)wN-R-Cw4)7 zb*1FM_YUE1+Z$R_O8e5-(5ozd^1iWY?$VpOz_)`^majg)_t7f65GuP@Y_(h)1tS7j zkAo9zbI!7qYa)6Jp?P($1WvhLNev@F(jVoPY} zZ1v*~eh^_BF0dRG`wpW#0HewlV07`Nc$xU+FbWDFE4<&xTlG2KV%A1-E%b}Fh;Z`j zSi(3sr~dsuH%!#!WHYkbpw>}}_PJAz^)`|E2>m-xD&L>h$r|szTIs4asl=tiH%n68 zr$|G;+Ib21wlmALc=dgF;!QyQGk(m-SEePLI7|$$SHlhOm9GEYrAUhS~4mE?0Ga_I4R(OcmUGHhKNg+R|QEF+FpMSnm5cO$igTnwVU_+vb@3PhDa> zZ9P3KL9H?;QWgCiU2_Yx8Zr5$MwEB#Q7DJ;HmFr`_>!DZP2(cEwl#|SbL@9tPGz#B zRoCE(;2^jcQ3tAN3_cyjn(n@;u{!-QgPx}(O)o&W8bs$YbrW~Y>_x}0rza4E5kkTs z@hX)TK4QK8^=wfzB)>p1XbN8Uo38BrBoD=1zL~7hRVB*J9Jl@Kg_QcX?~CXZ`yXtO z^gO<065{wz4A$(9S=4H#nsYLpq=d30*9HW7Rg=&QBbM#b5JeJUn)53bSqSe~KBLpE z!XORI-Ayd@m6wrw0T!o2=+;=F451A^+n-)=;EZR2)EMsD?4U&o&my{$%U;``Lf$Na zJ#Y2&WCKMt@4#uv#+(-qfR0JGa(S>fTDrwk7mZ(}US4)%98}9k6NrXlvG`u@s(Y?3 zhe?#UVJfSaH_^ zh2j?6oj{S`5L{X`6f5o&cWHrQMS8jC+;hhN?w7q^GDcqZntQJK`@W^2cPLxPn}2i< z={)n~|C8T|#^8tveTRzpjl2B2JMAD0Q*JmPgk8lI5TQ5+4xpa8uUDyr2X1S z>A&tAVI$!Hy5Yxt8)!oig6Oi-Px}4TSN*;F$42YNk5bZ{feYxJvG*W%;8{xb!vsS| zUGlnr;M2nNb|Id9+p(k53SjR(0XpZIF#1*8-}VPjQ^JJ&u@(|2m;fc6jO7*9r4hZz z?1!lMPlxBT&$;eO+!;&;%K~Qu z@+6e~9u0mx%`80z4}JA@J(Ka96iXvDcUt__ZjAq7d$cPndCOK_23T8~USJcqpws@$ zDA?K5x65J7$Yl?7rwBvRkG7Aw+`2l;pRFvOXUl4IC$z}=PhXCo2c#V+Y~6+8n|pte zA7Dq%6$|x61^3v~E0$>B)24qgOGo)g6%dNcbeK*5x;}E@2)5j6&yAQ;d$k&2KT;z9 z9&OdE%2lv?)c?S2sSMs2(8^~WR~nKN8D{*ZY9W6= z_SNg<&?RN+JO$mi4()k)nT^r8f^h|2o-H_#_BgUEC#xJY&o`d`P^N=q7@e}2PKT}1 z3%&=V%A8gNCW}S8Cn1`%8<#?qJP(78yQxH*m3 ztvYSEzCe0xYx4ntx;h>UC+@=SoY7B01N#n@iou#%_u z4%6i11ah5}m4$S3vq#CHgEiJllDB^w_%$ygkhAxt=iJuhD*cj~|l`wN|Nhgx8u--rT#`G!B47fMXoMWYf zDgiY@KUawTPH*X3y{M>xON6*%F;H*p)ff|2{Q~_dqJcOIu)K|yue|C~q+Ce>T@MU~PuCf>Nt(DtWh&-2F3dyL&9j! 
zyI>sJv@dH=wVRoL*q>{Y=kk2J(uw^s4cB(Ci;pi+Mt6V>es&_kiI&~V_nnbf(-^se2iZLw)XtTVp4J;=VK=9cWA zGaOg1q5n^eCUo{vh=rWEH5T9Ef^N9cSdO{5v^aqjpIeeV>Yi?KH2LH;z$XJYs-_Nw2D-&S5|mUuLEd7}7oe4YznH8OXcXmL`Mc{>6@tUdQM$KU^K z6KSM`=`RiO)wxPW=f7FRNaz?oM2Z{N>?vdt#pdJOeYGfWT0#C`RJy}$hYj}ND!;sO z!Nxd!D0WQn?F+uxGn>0a&r0ETDheD*6BH{V5{CWpd{o|DzZN|I&=jzI(@~de?=$(b z(oXCNm(JpN!gg@`_;mA%U}&BjaXPO^K~*`4!)r0|fme9`&e_q$^?4TW=LM$m_1L=T zEs>d11yS$z=ki8#VEbDmu93pA3DHn1>CF1ph|~Dc?Y8H31?owc5qLwrs%E^_pMsm2 zZMVcJlX_M|rDvGfDQqw)y;NDY|6|zDvoPLuJddN4I*~Si{0jQpZs#S&jAh&wMnf)LS~DhSERR+J)2; z*KGJ!-}7sY(tJz&D`<`Vq?R^m2FFb8E+J)dTaDGj8cA~{1wSySKMAg&uSZT%be3ZJ@?mFB@fSR(i13jz%v?xH+fsuw%E0QU&DGD@bQZR-;d#Qx01*s+|6+@ZNpgV#qX5{m|Q1u zW^DSs@7%u-oP%u@4P-t=v+!j+t(qp3UO|{ux&QN}4C_z|XdL!J3kLdEF(>A!fBQgw@d z8@$S`I1 zP4Y3*!L?#T1goOcDie@c0kKnfZ0^9%mF?O!mtUp=jU9j3-gw+$OMCrB+44c}N$0C6wIGg@R1d49JuD=lkC#RIvSywaGD<#Bx@%JDakmr4Um*xjAw!sR;U zoZ24s{P%hs33b1v&w{Imt(?Kz!!wFU4Z$Xf@RQ>KUC%k~bWyGkjKv&;WVfp~9+G-U z<;tW(O$=)aQ8k%^(H!NXgD-I$Pb@~kwrD#$Ce1p`sw?WFN*IO>dB_k6iR9$z*+~+z z#~G^6pOBWYt#goTxsCSa{tcls8ml#|V0%T0cxIgap~ug+?R#75A(kqaFeCr31ae?| z^QSb%hk47oyrd$=Ut$LEf|{Re!q2-RWHd{z-tMIu7>VwO=2fP*o=p};NJb`TBi~#i z>bMkr?U`CMHoIFzc`SC^a8#R#`n;pIoFqL8tjY}tc5cPPFG1)kAk7sq|EMWs^T-d# z&&ZzsNSWX=1^_(0XRq04wjWbSG&&=@nV;qZYIG?8nG6&&H zAamjC<@Pk)=+6I8d~72zcK7!67krfJ-7ZQDf zT%IS0Io{`VoyP;({Mmm{8Pqlke>(bM`Fpax;PPEeXhQl>PR-@u(XEwrt}%1n$PK@F z;^BgG&g-LLlS)s6y24EDk(+VvYNa4>&fQf&a%E+p) za@o*ZVILBWh#~k77B+ODA4Vxh79jKN-uNQ7&&EXO56u;%rw2O->{2 zw_*cv&>R0Zb0uA3FiSdSTv$^Ol{%-q68{ggpJ2YA(sxP05DkQp>vQBNAls4CRHRiY z(@Xxn^`%efyDj_F5y=U3vCa}M|MFL!YEfBm+>`4w zYscjg!ZvK7iRq^BwT1~aXmAeT9&NjQ>k}s1B71MK7m?&T^y$lr5w29D{85mMYQygz z9!B5oliOR8bd=4Fl_o1k=DP(QJ68Btq+4^{iJwP@QAE#M2XqP(PaVMou%fO2%3%~c zA^GIDnVzOXrIUW_Cdu}-X?E&%O*NcOb&`-T3RMddipuD-_IqiuCx96jkyw#4B2wSg z4Avu{r2gCax4yMJb8~3rdT(tNIL$d1zLf~^31mOmW|KU2>!N{s`tiumDz&X5N6@c44GR= zsz9 z4QKp0YBG+)^IP|T!FM#uWx@bVbcdR3noWOienLge|xj|Q3_cTvll_cR+^nyg!ao{!wXLZP)vk&we89d6+2hb 
zc^VFUFg|h|#76f$>f>3pB50ssrdughQ;+Ojig2fTjEZV|vew_N2vd+47vDyPC%Tqzy6JDQKw|9B8JnnD3Of98FVgMz&2(f1(;;cAT5` z6iRw+A6g~lf!3;ms-pj)Xni#8qSMSUXiB&WlSF8#5a)tNy+xWGVb{_wO>Hix?lWJ* z7-FVVpHRK-9v-WtHOs4QA3leSZBS4`-+NckR?1X9;Yw>pWFog7o~Ds3t?iLVCyljcBxu693|Bc${q}+BQ=sPP@*wNhig< z+B)v`bs?eYmzCLwkGUGI^j9n117vlA_(A*4qs8xay2aTHEN1SV^M*0ZPsSa&7&Duf zNa?x&nyS~6E7)=|o?+hZ=KHZt-tr$qLI>AI-$l0_Z-io8+}8>a_Vj*G$S#mfnfBIt zC&pGVt+LRoC$pmjPj>jVL*l|{A^-LDRzrW!*|BVw9Z?o2Awb&6`M9{9aV zxB3vaY0V2uy&L&Pv3bLH1^)Pxjd#!vEw`KwL%Yy9flsSp*}yH#|CH0Pe{mm0MVuoM zCMDHzEut_2b)!w6NlS?IOO^$Shufdu++eLB3D<=M8qz}lyv$XH`x-vfJRw>0Bfh;R z+qLYd>+{#Pm#A-FulTFo&DLb)a=kiU|H?WMk7v<9r^G!UV{sACY>OD?rFOuNC16Y+ z2kqW+%j*l~Ts!%I9mX$!WKo0)40AfFzmJG5ULfM_UE?*bVjl^OT14%3(Xq4=N6llI z6Lc>KlfXUlQ$ZDkbgr8YE-;)spWRgjwv6IXX7f- z8A)HN#_SUOd1e&IAPUtQ_^dUYdiodsU9`>MmMs(t`16-8So4v7&g1CLdr!RZ`ZSF< z;~JS+3hWX-ka=!p*7t9>|M-$l4TI$ouRy=SsJ^`9`Cf(&-25A{;rjj300#bjuf{M< zHHbsKO_RSwm43#q<>;$zEwA5-8DX^hp<6P`(Sjwf`(Ju0RsYhn`Sem9RXi!e-`DeW z@LLd4Cx${jNHZ)M-Yl52CTDk9?7?)=OjV1E@Gn|?Ctm5-q1&3+(o}CrNxIzC=e34;^N4wq=Q>TY6yVei5 z?-N2Lfko!7@!eOo|4=MSxAVOA8mhy|PkI&poL_Q#N~Z=StiRfJTL@LgsDVGP5Xb#V zW`XN@33A+}^jR^xs*@4z)PE0WrFibhe)QCQL3Bkcc~d#;1y@RXOEwff0<8jagk;eu zC11KPI;vubZn~P#I1KTP+M=8pyCC+yvtdgxq3TYdvzVU%l-iE;U;-*?#%K_Q-00Q! zfFzBBXsY{w4DmFbe9#*VZ3T=m>hBVf8-B2n@NPtcL54^H_v{Q@sF|8<75L}#bwWi} zv#&j0^u062fSx{StwlDsv3kG5WH1Nz&Em2(j7X`Sbju4Czu4#NY^-o5f)IVsSV5cO ze_n(KLlT*-`(;;#HOcv)Y+@A-LJJg&?`)d333DdD@9)Ky9k(ds6DBiz^I$VfaR2tG z(rlXucX@4Py7WUlcZiWHQI#G}O>%`FGgw~pTEmLJ^{uB|%s-S_;kPuzEl!X13*E&v zXlEQl&r|cv5@9j; zJ^{GyJ<{nZA;$a7$Y=Q`%`Fw|wV+g2@O7utb94WA^oV(G4D7J+nUW2(a;^gbM4x;2 z%iYqCMBj%KIRp|KftyLFFgWnT-De#ds|XNS2e65(X%nFiyySL0A$)njVFR|lVsevt z4!Rlz@2=oz!d%fa4S#P{(lQ^d-|w(QBg_8GS+uPv0(9LQ8yPu7MrG(7=Un711pczC zCt0bvVU?bQ;@{8%E`D}nD_E^GPt!q4ujDJmA#Rh5?Be$3{o1#%@>3V%YpAzzI;G6q ze}))f7_W^peu^3zUq=BCWhJy^eSSHuY+i^`7NH27kADiysbpAQWP|fvWQ?^nW{|h0 znu1=&@rbTImXmng*XsdJy1Jj=!3Rm|pn?E$?L_A>9yFx>$isS7iRHkT~nYcYe)Ub5u2`8**Z0Ar|aGkyqMI@gb7+(-_~! 
zTw_R&3i<8xu40+$5$3WA`RR7F777Z_B@_2uwHR+N?UT?m3RjQNjZRva)35bc{WVXq zQhjDQy;rc-+^`oolJTDA4RmYrS=vadgQ4|)-4s(O9b9K_HZb3oX<#-lFVQMYJTgCI zH|edVl4OQLT1huZ80L+;x~%7)2B=kiRuZ+J>yOtK*}X<)tlEJy$t!m((66nv>YWVUGRXtl2%G`mYSQ60# zv(!yY`^pw>F_;)$_mT*}+XlpKNN~!Z zu8~+=&za9~Y^os-{~|*NhFWL1gP50sQKY>k@X$<~i0lSh^ASHE65a!ZgzN)Z(0re#2*DGgvCR zhmpLxi7D&ME{}!F?SO_0qhQTOXEs`}JDoz!2b$nk+va#|*i?yJ*{h`VFG9MaeB^n8(nxrSbV8k5_p(g z_rfdN+~!+F*aqH-Ugos_1YvgqTr8TI2SC%jDb_e19HshI|KWYwNmxJ2rx+N zjTM(fP^L}S^&)e$2|fOyP*JC~;ZciB-i6jOJ0cr5^Tf8+(yWe0hi@ygTEwlF6oU@# zX8+Q@uyo|RewYE*(BN@!WHoZM^tE7KW+)C}Zt_{}h*!0`^LSeDc_IEpI>PNfyFmV- zhyU zNksZ*Gx-v7v5-AlPqIU#NIEM6LU#_DNth?>0FDaImE=6{B!(HaV4Zb+WN#%~mN<3K z+A4$1K5Q46N#{K+0UZOiVyKjbHjdI%7pvRutt|9~o@J?KRo&mfD;{yN)^AcmeZX&A z`;;r<-!%F8Xi`N1QX6!T8XrA7m&(7bNsL;oh#?pUE&Xa+$J{hFzGq*wChmW)OKT&3 z!fCUj?VEO75J4)QQ_+lYN()-vn;4cm1%_2Hy}{%WyIvZcBs|TG#W{&a7W65fQBD#U z>(&jmKdP@}4C8WYc;VZxr}~(dG^<^!m(&JiOk@7~h1C7y$61{~WA?1%F9ArPJe%{@ z26b?S=}yxKb%n|`x9S_9%pv0V5O_TUtLT|zuK@2PU0_#8#0wLOr`e`+ zR4aWI4{({LF~B^FMS)EE5tkcuf^YZl@W)T3}t@}^VOF(5=tAuX^$CV_J8u_HN zPS2qhUT2KwY}M-Wj6UhnE4Aa#&nL#H;+dZrQQ!FeF(UNkNe`_}fz}R!Q(qFv)qvy` z*Op{2EMB9Y9ElY*U|i-b@UO?>_$j^Sq54`o%;3VclYCa&kpGZyrI^&3-_3j;u!w?o z;v35p&2lpl+p-!#_bXoVr;sR%y_f4dtl^|e8?XSiFl@641(}od zkr7h5nMh&E?OaMw;LJ8_%RXw-Z$)e(7)6FQ?Ds}%QDdi9oxk2`L-odPcACY*snyLB zmD*6-9d3T3D1`<>=;*B=G6{$`%1Uyq8@mUShP$sXB&+=;l#;dG27DCF9SeLJvpuVH zvT_bG+H#!~DtO-Eqd6SwYlpeJjYeV0Pv41k_M&IH4xXAf6kP-?Q3s=AQ)Gt+%@Q2-81)WfduRx}ec)pY@Sba`92baiAFMz%^aR zuWCn`$u80L5N5Ca(L{4PW1b0FTmzEp5$t`OCtO^T3=g*HOogq`C=>pEu)G4pV7Kf9 z=Z_@>4Q7={N7Z#-8mtnIk^d}b^2qAw7-b<<*SnWdV9UxWH45uc=I$9zg}p0XEF^V< zjIvCk!6z$>fnM?-owGMWXG+jCq8(PkmVqGbk?$3ii$x%0CHTNIx^@#&y?Q`gos`&3 zIu$Fn-uzhu^dFbbjk`S?L!l9^YRNoO6CPSWQd9+f@rtSqa1A)B1e_^;>p(2Rm+{q5 z=#0$Q#*44v}J1!+X=r4@1-IwD9n#YkRLiq|t2&llim6M8Zh zEDza13xmFj1zjcY1gh^|Vu8Q| z>#IUHgsT5gEO*cNBhi}0Dq3LG0fClvr~gnY2X2(j{tENwya+t1t61{zxl_v!vn%5c zNz~_n(JNU$?YRau5t&sXb1W|k8&0bvJkj>(6H2GyTgD~qS%F+$La0$$C1!Uyfx6mt 
z7oE?uJ#(e+j6M4ze<6D0XRQG>rDhcc7`OARWF@_Ve_B5fna*M zWJKu5J4$Kd!RA#|cw<$?D4*AxsUzPh$epE{hm2#w#tD=c{1I8G1ydc5)_4{QP@LSm zY>op1A~3rpv?qErQ{iMt1PdnZ5(>JSG_JfCG*S}Rt$2F3QuPNTo~#YJrF+h$t&-?TAU_^0&dnAqYp(Mj~n5i}pe$*^~h z{v6ALY~3aaAcc5+469Bwc%LeG$O*p?nm5!@(PtvZAv5p>BKiZw|5l#=`@m(Eqi_X| zA+e_KSe!ICov|iceQgxz{g;8!rA991H%>DL%h3Q)Y@c^47P&Tr6JUqC27xiTCHMT? zO{8U4UWfUzEuWQgAzFP!Vi_Facxntc9q+)a)pLWb8JMvLvN2?g5sl-+LNyMgQAxSi z4ehgHM{rQ*YD^p6J|uAy_A=i45#dwgh@cfO8tQs%V^Oi*r@8{1-oA;q3LIwS4*6?6 zN!$_J`uvl=y)g>K4HAk>!5#kkmt!IQoq5|+IK9I+WIWHD<9_=%f$iD(8({R=)M(4~ zmfy+%ic;&o{Op*s?~j%^>T`a4Z-)(?1j*o)nZ|=XZfe5?jjihw6y~>(yI@d;9765q zUUrSq=qgp%m2vc(<(cSK_;Ty~(jfJxDlrLU-fn&uj7&^%l4-&b?HG(&dpSN%Xf+4Kg;v;@boRavs;pZp26k)aE{RdJl{^2KF;~rWg%*e;rs8! zE@hR3T|Ty|n+t(i3Z@^cuxElhAaC#ZiKI`tfQ-&*hi!L@DNIxErzGP$xdAx!h}+2uwPu5ehG4TE{IjIR3ZAYxk>IRrvGwG{ zm^}>pj`Z=%nz{bbVV|^BuB=|i-Zrd@6vU5X(r4@6D<9xA^xO(AgCUdDhbuHaz7-@A+G@>I7Iq-Bh**^c#xjPA2dTo+N zObI4Iaq~0T%mR0Q#6<4cG*E&{cn&sJu--#m9AJDiyM(!5#VQUFwJ}_>$9?}N*#ZctZsMOtqazO8ywT0U|>8%(m z^Rriqx(p1l7Y1SzgkPEnX!TZ--zw%Ox1KrP870~$^z|luF;S1o`F^{`b^mdN*wN+Q zzkO9C0ju&KiuRNdU-c?t8i}^N7p1&6RqOsR2=T7D5z-jpqUmAi{fBZHv+MEqlU0X9 zQ5a7RzrdmjyO)6%If3{T*vYy5z>#kBQfs*($5DG67hKfjcmH zh@#=!xHk@raZ@gz+;UU2=6qIg%B_W(2;hY-L{Fu8q z{fjk++wypq{J?j<1LONVGdDn_c?EEmEM}U1W>0@|IszQD20G?V%Y6pYD%01s--qWP zwNn2&s`xoQu2gAL(QU)I_*3=7Ng(0)g$O@|A0rnIztw<-;_jggClzM-hgcmjWuiyu=S5?rPp-ky%* zLhKV5mCJR^^QC%+le-`6bQu&J(}01D2{SyxeA;4*Pr za#@_I*b%Af7fkj&myOibL z!YYz$7f+(=57->2gGme$?6|MQqQ1XQC~|~z9AkJdQGUGGOC+Hb%49B?tw@i2cU{HS zLHfVPf6$6d>SJ;>k^}HMii(Tqd%mH74N0GK5^UTZVP0g`u5ZeQxdRv-w9?3ST1%Am zl+<4DIW{sB(4SN^NVCM!OjiC}&;QZN<-5CNTQ%0Os#B?0?n&=8%Xmaa#%mPaKkUf% zv$SaJl{0RYCQ#mct;zJ|WNNFP;ZSokqSdjKKV-A20B*R@B)R5^(d!~{pGfl}v|7o_ z)-^2K*Y3=DX|N|JNNZlb*QS?2YyM6ry25nN_u%cLL$g-7u34YZI)V5khTPa;? z88|L&*)|%1ay@i$4TA;~V;5-n{ZaQPIqcyja#m;P7htCM^YK8ZlDXSz#1glgFu7st zGbG;6=xqZ8{?-QpPRMRkHQlsa?ywjh^1&G`tPFKdUGX6Y< z1nbU`Vx_&?QzkiT9i?I1&W#~?(50L=b;Z&y79+V*)bF0z4ra*#Xt5iZ(QWtK20_0? 
zZtlqbQjv0SEJ1+%sn1&!X{b>#p3jr}O?(<=(M$AauaIlQnaBC7&vnS(zlr+%P&bM2 z7cbQe{e^l`#3H)hr&@_vu@99dKL0HZBca|QpYus?4sLav0Q2In`sFO@UC`I5^fhAD z^LOu;UmySQ3MyU>CUL&n@6SJWPU|0xN&eH-{0`6lXtKl8^lw6{7GiWVH1i~sW9QSe zvXn%NZE}q|^uQ0!01tmY#{*Dwg<4ef8y$y)q3i&sq)(65YYwAaECN$-*G($_FRaUP zRr8L%11p8u@0tiGRw9M~FAX;*a(&1wJqS082IxdLdv)PqK4Fh6C3kzBgO43Fq6N8k zJ+bEunYNNdn~h2ThxR1~>x2P}a1m7WEiVY{Wh&bG(jYh?T87C*N1@9&xyafBiR_Y4 z813P=B&@CI+9jxXQKw{=i!I`BQFp{Ur#iezZI14756qCWqWRJw8ri9yV_@9*r?u)U zu>=laO+@1hPDr>-45u-znbssJv!$6jdUAmCdw1zQ*wLOeYJ5xd>L~T6%jO-KDWc*_ zn%yX5q4n_OhW#Mo9|}|R2e#`~<6T2$$F}C>c?4U>U_ecc>0u#ZcK-(&yHKfZ;ST}7 z6i662Gw^5w@eeCa@5MimWJ0Y4#V8#Q3zKIfFm1_s1MI14T*BfLy#0a6I$|Z2P8-RS$8`n2Z|9 z=jiWr6b=IB2l2jiSNOi9of3x|_shJHZ9L^CuioV%FMZB>I}hYblMFP|1wcpd2hTje zv;x|T=6d-_vz#%i#`qg*8MQW*{7;$*DP76RSP<>aM_(YjD;X0uo%yE;dd28Ws%!T= zY=ie|K`gKiyJ{K-KgkE{`W*4qnDL8QZpA{(o3-ecGu%C(HtfUa2Lq|xP2AW#BpYTa zah2?-Jy&yO3T?36D!L+b3H9>=czvv=XEV!_V%b~AjkVQ0?s?>|BsHPDD)$+kCOyWo ztf4Q>c-FchvLm(|UOeKm;vBN8j_MD6e;#f*;6gloFuVn>FXS={1*>A`9&^&E;CK zKlxS_B3=3gxEc;{SB)_6xeyb5!v%6lM>&!v z^=i0Ucf27jbbIf70hy?>vD=%daa-j{InIx9gA**FoAoac43)ev9Na4z9+rwab}cwN;ft+ec)Scv(j0n22hHo^ zWW>7s2eMLnsPH-1=JVXuYi%R0ck6cy1}XCWwb6Wgj>`^0U>{*34Aap18-t+Ns*4>B zP={(tN16YQs)dgIDuUrklF}mdF@i{FfexWXp_xQ4aCIb5J8&pAPA4IIEjjdM2xt6a zbkkEuQ;nSyI6Jfya9i}fg)+JSxh}v@kR+rq_^oX6w{}AeL>Pq|?oz?@xzZh+nAVeW2>Us2$>ft(HV4N3 z74Q72Z0qVQFyV^mV;P8BI|wg1M^!>_#8`ko=Bs#23LxQ#H+nkepqq`;^AWx8 zs^9ly_`ydeY`1=L8rFvAePD>cXn`6B%8Ulb17Y7hqv;ZdO^zUxoA-d0#8Z zmIQv`Pu=HXD$RJ76ir#6bJ?ocX9fO4DU(c|g4i;1^ZOTFT(Y^%q zs;+nBLz1yv>0Zh>1<<@aD#M$>BcSpX$sqzcguOqlaIyaS?(>(9*s53Fqsn=swxGS* zvMO~c-n)Jh{>ROv=pdMm<8w@;D*w?%w7ZzQ-F{Kurfe6ZsH=tlikpc528f6h>V0F~ zlQd}@8=V5CnF^3JU)qtz)89>iuePsx&m3|}W~QqC;n+NBbEepP$>1wQ6RoR;DYI9o zt4=!aJ}D&_*HStX{VUpfwpzo#s4kSJGf=C3Xa!=Z7)7l0>G|v!MEI~d# zs0+|qeP17`tR57+h!iBJ-x9my>S*9PWFGJq@06(+5ngz0A<^9v8xno?4+ZUJ)+dv0 z=8R66M{4`99}?N(t{3*xF!S%xV2B@Asy zuoH$OoTux0Mb#TSqtZ$S1ojV;S4u%s*RoVz@Thr-4bPhYY|OyvjwKVuFs-C01BV$4 
zd)RTxmj3q#8bxDH_lq136nc0Ln}o{&j~CefULTV)cl_^3=mrW7{S}&72YqZuiH#Ts zJ3byNq0g5oee?vHw2(ql(!@MJM?4JGlDF-hTp0;;9)pSs)%DHcGX~xqIHzj4Zqy4Y zGm}MM0|?%zfR38wC3_Rt!Jnc^ltM!-hgOJ5=sJ=>jXKS~ny}cYj$ab&DT?=-n>iw| zk?2hJu#WhV1<*jwi`a!r4PI_1zC_1M22k}^yX95nSqADDq4wl0dyMIYqBS>fzAK{& zG-^^QRJ+w`D~#2d22oQLgBkUmP}ti>xMg@*nead<8Wo*PB`=>^14JKc9R6;1D>ZHh zaBnqDLe}|7Vzy@A9y{R6+>8XIP_@MU*{HVUPGz0pKAz)+YEMLDwWj4#%WmE*4>$o; zZ?E-~L~y*E;wRa2aUKJ=lq%ERMGkN?(9X}RjEgM97*V~AXz^?KHf>d;=ae6o5}4_1 zHE5`C zFdXZIuyL#q5u8>%a=_6YiRz6;MVuxoMng}&FgDK%?B8tM-4iv~f9Xrl(ac_JnNHio zaIn{~s?)Hb4xtWHyJNUo)*Sz}>m}j4tY!IRqo6Fo5-o{e8%rSP4c7L(09)DvFWI>2 zUweZOK6TQQi}yw2&oVI9;kF;JRUjQ{x0|&rtXN(F0qDdOjd=CvN3aG#`iuewcyGzB z5o_1-C!kS91K!WiiM2U)jlAtcP%oG5x>YT;=03Gx_g{ZLR~o+-#uVkD-V@K4_{cWT z*$TaxRKsRo^{TN0P02UB^j*{}FLKR}Z*rXlnC$6d2LD4LZ^|yY z;o+v&YFoIgim9d^Pjy$QjyOw>%)?cA<4Bkv5;(0&{r82`ksq6YcpUvXrwPIG8cv#X zFhF#DHs|i+^k>6{6#&?dQ}r(DzKokP*KO@(qJHk**}yYwBGkQn!uG}PvOb|JDO}t+ z0r!B64ZgpJTJfXl@GSGgQmba>@(02hikczOa`2VGlBC{`czk;|K=`#LgrNQ!@aMOF z`eKQFF;qPb*<|7X}{loSQ+)c&FtpkZ$DNE z`f=MNqTsXsS0h#pYTHv@rF=f4FgH%eM|%h#H~fu3t{CeQ4bRus%bM-A1uLW*Tdf%w zz;Kg$DmXn4YKqUaP>2=YO0g4ak<8W*$l@G+`B>!6LyH~jH#wgsv#F~`n=$DS-S~Jc zPhzLBuE)55rV0(5gySBzv5vnoN*=3izEBuMPvUrPM9SADWA%!@B-)An4NTHBMEgdY zHs$cD`M=$_?Q4o86yqR0Gjuiy4;UiDf7gj0+sW{f&6GW|HV1V@-QZda)~vl`8x`Yy=nY_={}_TwaW@31l<+%1F)Xm2nT5VD*pczpG!>t8pbKl7j|gaG5Dvl_V^-$J;&cUXes7 z?6%98+nF&=^r#r5p+`axsaiKdaPH_^ekad$$x*p{Zc+W0l)@AzT-r#cWT0qK(gp~F z@@qx=0N>bmW0B@g@c{+};rZVxBq{PoLOz!PibK(Q8Z1JsiUr!Ks(ubdu%q4uQ3h(Z zAxLc8jbDk+sG3G_Dc!LoTewq;)*|1%lcOJE@^lJd)-8nR#)KxzhVfe3|HI2G`#r*( zt7rceZX3#3_!UIYQCKT-_$X%pFGx*x7je?snsLVB!Klg=ZSYgYoAV3zAC)!^{HR42 zXrt`43pTvor^?qOB5Hsec-MMgU7y_kDrk|e?E1B4mf>log~%u+F&>((VqAMaw0Tm3h}J(*{G5YWn~P#KGs%Z zV4!p!{KHv&m{7ue-L)tKsB#fx=nATT{hdbmpnds}H9~d(iEE+m4-FMJlOct=y}xn1m0KmkM-_8k*flD;fYLpe*D5Q|73BAss*4;C&K2f0 ztw0wy+__z})ik-n0nX?)HKa+gqW1BgQhROmM1N;b%Jk-W?bo_9Uxdeshy;P*gI)y| z4%ZOXw}p<2Kb0Ms8bW{7kedfn|IId9cvZYR`$f4$NDH3DVHeu?EYBySWr^mN5=81J 
zL;1FT>Mg+I$VxKj!7H+aRpfcUy%o(HPCZqA%ho0;pR1EouvaxR+mp-rFl(P($uFc; zfcKV3m2&(lxRpVxQy5`IHTtym$MD#zjTzbK=8^Q;gDOpL{1AuGC7#uTkmuZjnJP1c zamQPXfyGG_`Lq$c^VjV>{DX^bKbKs$5ze8D~OOE1d)blD}@$ z$;NS(Nv$QBQ?D+`lvXi?O#kUQM4Z9k!XhJwQU$etcpX3VZrS)SmoaOqwp`DF zYdpql>bb-5OVpB7!EW|gpknOLy1sBVPO}|Ee&fOt87;BDhl}FeaZJEmR#z%Tx(Zm| zt8#D*1C6&A*PS9&irv<@0J{il)q_<)kf0BZH`eAn1$X@`-ru5^*EG^t1 ztg%s}mK{-EKVmNZyszM%ue6zWE%IVr-J9LSiT7&P=t3!^~}scE025&CzIPbs{pqJu-6ar?X;ka!}N(CT9!~);Oe`Q@9Q3(V8Sw zUG$fMdtBUvbO&I%V=iIcQ%fzuE^q*StZzkaVUOFdaJyqb zsV0jZI&%A0;^?H0vT@=163WqQmU_NZ_f_00HlV7?`xq%zxRqTA{1G$_ zM0>`FDR$a~7GEP<=8a;(mP(om`4llnK1PldC;%e8Bmv-p#F83nIS&*?qW1L&ExGOL zXb^bWhRxLy#rU31=kP%@VXG>vMrzO?ARr*OB%!)>L28xMprOG9vs4}j_@JtzbQhv1 zE3Hst-Twf#N(k5-1DsVw{{VKqc0ubLDr$n%lX{9P5w0>^0Cqty6lQ}(21O{u7Hvgt zTznFY$yv>WAk^~KrLR1;8i1}QwcFcyXk6{az$}=IoO&APbk#**M{yIU99%?zsF#-E zb&YKHkjr0R5mmq7W6x_5jj@yV>S^S%An)$PK(j;0=n9J>mM~m0IGr6N+|$7dUs~Hb zYL@`UHlc@ls!G9*W*2g7ZE~p4t%F~)yNu4pbO%DQM`ZZE5a+$Y!kT_57CNB&LejNwlVNP#k&|P#_Urr!s0MEc%v096F9Szbn--~>zl%6;M({UwXS0`7Q-`79S+LlO`8{O za~TbH+tjF5USdZ!0TI=&`(Ng@lM%1qKyp~Q8&COGhOPeqwP8`PN*NFYYG^2^-0+sq z7LhcvrN{9{PI+Z#?VlVGfGO$~ABr~@t!zE~Y|y#BxSC$Vd?<`{%b86klk#@XBA-H% zI93BE0!==`HCm}PBL&B^X)KNVO2bm3Lu8t6ZWjF0iU^no@?B(e zwOzwO(MpSL6#A&EtrXOvD-=QPG^!6NR8=oxVysIaq>iO!QB&2FXHc_UBC8TOyTp|_ zXQYe}5$b>7x=6;d+R;xkt1m-PC;|^3N+7g#9%`;FNZ$?AHE6ViX=T+e-&7S-PY*N} zq;rKq)dhld)ddfcn>X=6Vp_VKcJV||#yPG8AkB)VmC~q{Jz&l;1*nXrK>+~=jx-1{ z2`+I#SSP9hq6!N|e!HNySUeC~c#*$Fa9@S>ZAZ|kGAmRTF56gLPXvQsbiuL_U3?Wq zOuGQopQ;Oj@wMN5kYi1Jz#MB|1Q^(h<1S6nAGVbW(YHb>7FBWM&`?>jAFCo!nt~@W zWh9ZA7zUe!8iqmbU_RmXcqvjLZ_IMS8o|WXMx<-G2wce+_EnE=Z8y*YRf1V$W|vPm=K zmE?x~uG*+CS=&HCA+@L4I+B=}hVN^~!Hx}dRU2~~(QZs5m$}Z4VD4a4a&MZ#rmW|J z>L426$=E6ih?l}f?2X%PT|)MK&gnpk==QjaD`~~!DwPbZzm@>5?bASf{=5L;mEq@ zCp%=5kz=Uepz~B($6FcR3k@CBOM|+6Rb?|R-1!ASBUK{DPw1+(Uz0?KvGShTqrIa- zqQ}8Eu6OQaC##S6tgfzOf%jTmDgu}`HZ-~>@dAoUScS^4!!UCN2`JQE!{plp)O~e2 zEpw&5TiuRE+19l*4kJ=fr~%x}lQS)a!R2o4A$NgWuw_)*-ZA^J!;FLdO*<9K6^T8A 
zBfen+^q%U0;Z+*v_0h1m7I2GLTSv_%;0llxj+Lz82T{8#s3N??q8BPFs5Aa=FnDdQPqRU8G7n`yQ+5!rxL|2NB4Je?p{v=Qx zRH}-|Fo!k!8tSoIC)l|iY-hloj zXqaO4w2g5*e}W9=(uxWu0I-h~8b(Uw#d!7ss5`A5tQX(Nde9;2rcA>kVI|cgMxuJ7 zU7oA_K~;TH9lw6Vlw--!+9;Um5RhA<%C8aWL1Nm1cY)x8Ed?)Vb4yO3gc=5N)stTs ze##wBl2k-wZJ8eyvsSn6a>H$A^|!9L^fVZc-LAn48;53alN~b;>G5`+YQ}6_$TDJF z+7nHRwjwe&NXI#*t~K4uD!n9(Ab>y&6(`M9MM+mY2?=XJsCU&wU&ofY!-@2>QRWpj zjoub;KM?1-hMLfH3vh8d)VIrh`v1gIT+f!*xcQ5H$;@Ka6$y%!@xeA>qoJ zQE2vJ#V@}1$;y(AL%b2Gb+h5}56Id&7oYi13bVz)EOe~%aqdRa;6W4wcol`p&9%o zIi~M*KZ<`YRT5=zchPHH(s3<1!q6!~MP@*uVn6=cVh_3{#BB-h!`u$NB_ZoyAdtii6 zLU#cXZb0C7pg~+j_><&?jLSH=KBZX7WrB@b6(ArWAo1HE#&6z23xooK#Ie*;h^#4L zp|0OVRRTBhG$^YQM&j*vRAg9k4Z#C4{KCG3Qdy{>MA5JHS}>Nu%PmaKD*v>)KJ zMsYrr81}-KF7pU8P0q1nHRu&r9+ zM!sh_nw_eAmq_PU9(seKsH!Y2c!=Km5tU;2R1D2z@;2ER|Kuca7}k^S_kSl5Kmo`5u)ET#{__l zc^cQusXt2BrDYwe-nqSthb>Jgv_|EDn%(yN3##gDx!Bg~__T~&^9NyCehV!%ndBe1 zkPr10+Bu}LHges)tzgo=r*xv{d{()yu|5s#>^>v68IZM*T-{vhB(xryepRkwbzM1W zl@f}z1{1{`vMUNIuyjKea&gfY9*WkbQR9k*V%)rj5`P7nHS4>oxy~9mZWdiRWZ{FH z3yGylu{vxFI#q3lzG#m+qP#RgN&2Ajr^N<6#JpQmsu6=E;Qdin+T-{X7aLm91x2R2 zXo$GbN$^EnGy_tM);svSL|C#~IZ+icjJKl$iqvMLRY3s(0RaXhoEs@Ya5xZvXhA_@ zMpRK3SjU`Zg|%tZL{&1d(xXx0g4|US3L#Rl6GW`4c@#md_iIC}38iRjRaQ?fX@5Vo z(oX#WRoB&vVU3b!dylTBsvUSw+v9@AU0_({J&dGRW#X`rPXM7^_XTj*f1cZ=&f2HRYL+)N4bIQ+oe&+kbg;XxoZap(}ljy}{)D;+8HG*#5ns_A1FIghp$(;vPb4m7Wl z)+6RDw6^WbPT1dA-0}r{imfV$!!O!vPKY{Z!mWmE)Wm{)mU<6BO3nR8X1^SOXrXxf zf{2N4yej7q=8|?%YEH;U#Pa*8P!EEzUbx1?Bz%J_;6l_!4bCLT7dF(9>Lb7fsRs)0 zo2!eS;v`6Yv7o1t&q|%`n7Orum8jH}&Qx~3z!vgd9cuQpXcpqSQw0STSEUEb-#hn& zS)SmM!H*BJ#}gRivz^qgeK*UDcCTL|{qN`4D1wY!-f=?@ZII<3Yk{k7C$5C-7r zrHd+3XBjaqkZL0)zDiHh)}e7TYG_wiH<3BdZ=@F>681Kxqrn7*I^oUYI##KTO5giu zxLXZdJLwo5ev6)YZun~1tgyYz$s}jc(?c^ho8hiqy{-VaMW$_HjR+v_Rn}6t>zQD9b4qH+ z-La2~^4*fwj|Yh#RmsvdT;+?ba*vGHgU=?P8m_XjVsi^C~x)o+$MbS0)j0YQy%7L^K7ha34C4vQ@^Sla5UE(U}Z5Q5vZ z3M&HoUv4AI?LAf%s@_iDbk|YRiaJJq8tA%4$6Y)TI&ADZ0Y$epU@9u&g4==IAfVLl z5L=PJ15-p1mr~UFT~t;(TpWDS7o!>?uQjyPAg}HoXcghoz9@@wO4f~P5Lx#@)fH4g 
zJ>ViX3s!N7p>37)1Ox;G0D}^7`kEl|r1e2ic|lf(R2JL;(wd;hm97;uMwS{!A;nUp z;y#e0FJz}3P;33nDUF%^8VOade_Yzn33GWPuC6QXlU6sS6?MHf3tLE`00!t0&0B~s z4QTukn%cW>65C+uq;KjtdQX5+l+%dh6jeiH7^l&8*K|5n1cs;xEzctu!d__99OJp7 z#!EA#bwf$q8fuoKQOe^nH!Sr>qjqsA>{!(=Fj%)q`Yq7vO?rhj9fPr?_d9Y3+TJaQ z7STwl3y*Rz^45x;sI78)l4jud7M(yLK?gO|Y*{Y>D-nm? zmr^$yD`U~lY4Yf`5%RuqvPXR|tqE}_>PoFK>~0>%lgQBrUidz5{_i`S^T&>j@mT63 zx?VOj7$lxf8-x)oX>8m^jcC*r6`gM(Yl8^%kw8z0RhDyQ;6o}$_#nJw4vp<-sSP!+ z(QXfyd%q`k&g{8lfHoG%N)hI>Vz#c`zn11A+aX7|29Fhy^e@Jk32bq#xVmj;kORqX zPXuo@qAN=IW4oPz?R$n=$-^gj(;c-pZ!HV|00kztVpPSm?jL6OgHN{Ry1!a1_*>-z`%l&p zQm%2Q@mh?!Phj>nyE}r)*C5)%c*6Dvby(JskCt6a0JU9F!;bXnx?zLA~ktAii0j+7P8bP7)S?b2{ozq-A zx>8*8vdp-)DV_4iZen*`!B_P!h=#Z=1I=lMKO?#P5M@C*163AcZwHi?8F<#LDsekb*TQ=~>G;pOu>HZ>wzK0O zR=~!^D-P{&3vr6t>Po2f1rpPV9w;!G5Dq#lF0TVcMPR70;c!hB-2EsPL}pFkK_EL( zMx#iUbM8A`IwVSRQ#R2dB{XsOv1W_!4rHR^*6_i^~tbV1`JYl0#}A0H=zRZ5^A=P|x8X$$TgG2|x6g&r^DPzPRFtte z3wYFNRas-)u+pQZs*I>54~!nP)F>+?0aAln&SYegUurOTH%Anx=D#9kV8>$3%#!h8 zcxY(clh7<$vLe4}b8}+^(mb*`8XVaA>N=wIrsuXKm^Ld{aeROlk*EZ%V;bnaqlmi0 z;hSxHFnqh00Yt@*FEh6Bz3{j>?i~i|X^md@H)?s7Te+pWo?D`$B+6r@d~TQ)I!~~4 zB}HUgPRQmmw~L7!bT!#lYY#on?-O$yT7AZ`&o1y3T9qF!W##uSo-0$xgZLC_a>^K* z0~{(&^;ti5ew<@gqkNu8qGRac3-U4QMh_rM>}O9M9yxi80wbOPsxv zofYIm+vN)Td1HdI3RmH{4$)lb_ zy46_?U`rH?sXBm&wB|_QU0pm7T(u>x9c$4B$@{}$jC;&w=0@>nKnYox)z$vWz&8F< zd4BO6P*Lxwu9dA4q;?43i@$>(ma%C9#I`28ZmXYH9k`nto;M^e%ZI+}&HIi4H;0LOn8{CAkaGVXjsefZA@lIns-U6q`% zdaC>1`)utT-wCk*Vy;*yLtDv?SJge5ou~Gvau{22l#aibi$BLzJr~)vc5n9A+Zbzs zb(WXcYLk?`SI^%*V68Cy`G0L`7>d|&F~-qGiiLll3qRYE&l|}npju&$ z#+hBfXh!7*TvpC;5wfM{z$OXK*6myTt>a*U&i6~HKs8wE<9xQfmJ_-AN><&N|(_v7DM5#ojJjwUfuok3!+G zQeP=&htq(qvh@X-UtJWU(DGRxqMD^u4+feQn+1U(Y2xqVu`O#6xsu=7$ZN2wtw%n% zx~$Hj*sR*ya_t@l2NCLbQ{c52?Xxk2?pCB>a0)9TJt~hJ0;slljY^7*%G|mr&!dp? 
zL8{!}##{&l9=g?L)}+<@T!17r^+`+0Ru_kg1*Tb%TZ)6Xipa?JG@;ZLp@(y69Se5y zML}Hgie4b-3JbRLiqg6wHcU%-09A=oU{FzM1Ox;MAW$Hs(Y4!z7NoXgfzbv7;+h#M z0W4HshHB{HPpX2|-N&;Nq^&5W8eedCWL7Xf>PbOs9vJ4=q3XH!Y;@)|tG|@7rxz{F zwBF?KA5={{gqF2pXSXiLb4y`(=*uHu_lS2-0J}=(Yov)|XdNlhMsY)?X&-R%39F1d zveQn#RYo(|z&Mktm33~6*f*qpE~!ou}S- zjFw^x=G}{Vi1y{TbF?LNk)3shTo2oa4MyQ-@mAuve2qMtPXyA(vE03mC!%SKljIxw zXS-vvva?VZFul>2f%kOkiCmw*yLxQKe-DprghKm{Z)w_$)n>()%XPiNut3+sc{VCF z%^m?2e^iV&M~zK>ZNKnStBt^?OPAitz;c)QIv(f>@iwg+uZ;Ju{nXq zeL~dmK2^%uHXPN|O?M#KUK)QCX?op_X`{if`js7@;F%gPZW{S3?Xs<6l^q{8CApEU zV`>~~<&hD0igT@IVFV5)kMT?^Q@{zFXNE_Y`%|3{Kn}|-w|sPuJa|P`^aW};)P=3o zme(zm5~yzM;$@5XLM8{@WMwra&fF4KKN@q+_wC3t zd2E&TFaZslJwc;Ssw8rmS?t|$kWjSiRiZO8cClQ04~JqX?mK9ZtQ(sdE!+da0QE^r z3tJCz>8)>FN>!0FmW%Y64JsG`mw$Tz8f_DBWz!hP#=d0Ic2{{{W;;cCI5S-5EPOY&dh`i`R2@ ze#pgkubS!4IUH|$UrKi74%Z2dZJZR>)gkeBRnjn)s8t^FK&lUvzq;H&kGibx+R8TO z8a>qA!9|W87BV=kTXS&UO?fQ<_=M9u84lZY62T?2OPO-O@BrBOCZnYGC&Yieg_#?I zNB|#dn(DdUow@n8D{M?%tKK(2LG8q1jrQ*%n~z`fS+`GIt=lC0S9A9K)_09-{n%Z` zZ3}~d)A_CPo#W^3+=#?)aJNx%7V z@|O5_U6$D-G-*MKby;f_4Jl5l7^s94mjLZhQu9EiBN|c&s^x2OtoM#hwss&_{n~|f zxg645jY$C4#I8NovDAAO9yq4VMGS|k?eqL|`ZjNIpb!mDfLqoVMHZqd4g?T`(OPPQ z8fjR`0CA@yK#Pd%{GQ#Rc&>n=Ifh5LZK0G2+}NF|6*Q<^t#;-zuo*_($%O z&sw%Ot4VF+V0+#G0=lk}xn+1VjIBe*9zu@kMAp4iqG*gk{DlauR@~6{*;`Q@r;sJP zc%)X9agw>3LR!ZWRh<$zYkTX7MOfyydaXp)K*n0SD8O>mjHDF=1Ox;G1PUPVv=kJW zn0g?J?i(jfjhV35m}#IJlBHPS6L#l!OP0EZ@;PP2N?aw2j{C#A3Kyw}j87K=9}j_+>9~c?)ACZUW7%NQ?fa$r@hH zK)0#!SiL$@Lf5sT}H&)I9xqV2k=_f6r5F)w}qtuV(muE0R@}R z-^boU=x1ePc%y*o5Y~*}fIAL5;d=nNictPwHe<(?YqGM9$+ou^Y!T4O3jhs0bW-_P zjlJ#>YfvrZbB93HoK?y1oy0gW@~15YHy#RQYuYKdb=B>=sK*CZp@dUqsTk=G+`ZXv zfw6qq1{Ri$8Puzcxzs(w1@+~YNwbY)bExx94DeDxuwc9Uw_kU4X6ueo$W0zjfTX@k z*>IU$Q|=^=+IGCiDjMG{7KVJ*teuW`PNfF7nZh?953=YQasAY{bHZAo7L z0C2Ps$mmLzBHkyr-uwGo&^W;*d9!^++Nm3=E}!jP%IA{N$-~pOhLl#u5m`Y!%iV7Q zB1dQ)y%mufp!g(^bn?m+nuCk{GPtJqQqTQtbVL>l;B(^1q@!BL@Ou+L^@ z=$Oj|%*L*+gfy&J+-@ThZ6jfg^pub*S^^5wHC}V}_N?R@D%R>Y4mvAQMV3LPTVM~~ 
zK>Sn}E`B7w%;qJ%y~z7EGdr2KV`$^2`ju1%$)6A5v36GZ3>|@cB5Q@?TdUnsqsj7F z&1+n)9F#UV$s;vv^E{i9?x1X;zu}dM$=e%CkO@DzAE{5Yj|wSkQg~@S&)S~z-Fa^B zyz3wOLbIg2tFX7Ryt%W+HUSGFh% ze^MV9{8gCC_atnV2eQegvdZD>v`!@4;KhB<-zkWk@rHZ?(Q&%JL-syLVT7mT^Rwy?aK4sbx? z9`=fs`Kd=0d>Gf+2F)EIX1sS<^~SxE9iyA=Z+F|fvUHI~K#9cEiV7wsw<-JT-XRP) zh(Bo)TmymEn6L6nKf_$~&+ok6D(*WF(Obh26EJzwgq4ZQU3AwmS2s2g`>g@-6GycF z0HmE(q++gEc73iw`rz4HNM^Zk_MCPCwJH3pz8_8Y<%E_vJ1aRT9}G=vfbjusgp~pc zC24eI4f-iWW3Gg5;_=17Wu?~LFt)x5XsSENBp#Q2Q%q9nV`)6K9x7VF1L^I;HR;FhUB>w=B{{Tkc zh_ZGr$(GM0&2Eh)UtCSrY;J{`1F1D7L|t{%Om42#T`}pNHn5&l092KAF-%vmOCL5o z5R!wHM($V(M$7{srIp!em(-kTDJZ{ zPyNbjpYw_T09&i^c5XqC2o6?57!;=+RrE{R<~y+Qw$CzLOC{6EspfS43saVDO~;4E b#wf@bKLt^Jme4cpU~ka{+5`jy1Rwv|sQh~# literal 0 HcmV?d00001 diff --git a/dist/simpleimageclassifier-1.0.0.tar.gz b/dist/simpleimageclassifier-1.0.0.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..03ad01e8381c16adfa3814e10ded79bd6339bcf8 GIT binary patch literal 43721 zcmZsibx<5p*XD6|cXtUS1PdAB4a^H;C*7B5B%)xGl zd;`@J8E;|NI`wbBS*Lsr_)OYC9!4 zY5&chjbtNlx{(2!mx@xbcFfnRa;-n7yOXRC0A@<0Vh0_I6)Tvc<<=*_l@l*JR_6h! 
zCI#g5gPsq8L`v8q2qr~pF=5m4E{Ld&=VsIRBB_n;PkZTB@1@C4fSB%Wxz(U6sM z*+_8}9uJw~s@%g3Ls?&bAL+nv_sGYpK|4uX=qv=~Tzg9&Q`*(H-*WPlD;Yh+xx6(l zA+V%VF`B+6ULF*U`OUV_gT>HG*WeT@i8hBBBNhEFpYd^Q2>cgjkN}-iqt0 z`_O;!S51llsOX?It%69vG_9P>_s-6-BPVs@}jT&%!QjI?-eh}bi*X4#E+l8 z6q&g`=&PhUH|pSLIm!OQD||UNWszF6@Z+$br&wgA6q!H0kQ5ClI}kVf^PpXnUW7<1 zB1k5UU5i^e*L0rP({V_0bi*aJrRS*n!6%0van8$`nUrw06!NHL5P6M`MtzrivWJQ6 zk>4Sk!Z%|?!>Y2Q=~I>xoE8`Tq7kW>ew%_EYZ=Up`UGRSzU*)PLQ5Ah1X* zv=!mIdt=8XmAJ^A*_||1Gbtfao(QVfVT4R=&vc^BU*|9^U$S(_oa>!=N>nWuzjCFb zB=L<}mw?xEh~PkbWm4qiz(TeY*IP+ZvJ?FV7@OF0xaGG6^*e2LpQ^mk`;Kj zIIHi&kQY$qB%2)708QRv*L+VzGNGQNa5}Uhf(R(FX_(^f6(W*}$X!Bp+UFwvkG|uy z%sH}t3Zz1`{n?baf$vGA!B;4M`-4?gq-y2oWM3vLPCXDWZd0~x6q;`1n&|HI50FME zJ%Zs>_wOOu*ejy|% zE+(!r85xX=RO_%*>n|KMh^v=~?I3f$_NQRW14~*5{!|)Tka%+F|2xR1@iqDe<>FoV zVDRAXKrmY2_JcO}7Fg>U{VX(Z1XK1sCT4yR^)dz)1x zMw+0pv&b6$oPivnAkFX;fx=U#ZZ;yy*A)GPA42|hz=J%Bmgi2M&ehhedP*|r^;tXq zfHp7WrdKk%+7WLx>`5j(+S!0Zm-($!L`y^&MN@acv8;}AmNo|39VJ)Xn`L*A(nfI) zNz}bWYbLQCrVwq{R}o*eDdM)*5;kkG&`>B3;;P-zP=DlGrhiNwNM~o0JQ$`Hx8!fe z(uq)`!kS~gH0-HVl7W5M3uSCge3ur+ApG{DOk25t?JWwDnRxEzck+3ps|f^btDzd| zrh1a*3gsQ**vydGDhAezH-$oUu$TiL+jr+U`5kT+#2PCIgRgE?&~l6=8*#)f?wWE> zP=iHUw3#n!Ff<^wuJlSim%V$fP7FS+8ZZRPZ&MVMCQIy?xp7S1Q9I<{Z;juDbZLxO z7}Gb_OS6mY%1K;3zZ1l*rNd#sFAAM-^;9;OU0RpJ+kU{rG26=Dx*@7bMG-xng*-#Xz&gdqD$qMPL8cR_E5t{a8^2+X;T29DMEGU4RPr(j)#&;HBuv$ds|c*8Jf8Zi>q?QvIuI#Z(?$3 z-uG{2c12){NAY`u10mblud3u2$uZH2=E|=bL{(W>Q`1zjEk%B`xrTS-5)&}I?2?hx zpD8P|;-qZBUfV-7kml$r@{gWRK+GUXWc!81Bu$WtQ^63xMfnXaOJhbs?X5rS>BFArEKTSVZnAWo9W# zJycts3yGDrEnzd4w;oPs%GV3~JF7({LV@AYE21pw#6F-gAA=~ame5YG9)==iy%iB2 zrImKb+7a%y(}OEkQ=Z~Lz_8oc^uC(yhI7!yzT)?{7kwY`|I%ZGgAi6Nd^ZbZl;TQl zt`jtyJc&_1YBLo2-bJH+A&Wj!WZUns3{yeGr$K+IYbE$b2|rWnErqN6ADn2?m&~xF zqhhW$$}xkUP}w(v8e<+|hdXGq(ctBQ>!X;aBa{pa&PYu37?K}v9mdXH@>?u9FsNs| z{$AKOgXt=rYthI4vI$x?#>aZ-W0h~-1QhzmL;n`de3mLfrBdRoE+Nqwew`x0%xW8g zukBXCnvniw=oqEEF)g?!#B;*Sm(`3(Kv4CFmqv+yDcVD@)DM}oP+Gw84VFe`lmSg- zm(bU3szmO@-r3n`)fg}R7vYGh+!+hUOcV7HGu9X$W(A6M1C`~B(pU+F^r{;u*q_CO 
zw8`RXeflP>`IHo~Q%(?MRYxX1Onvjtq`Ufw$PmMMEX>$c$AA}Z{%#PhdXcy({lrkl z{;jp#ZX-Y&JgcsBq1D|a>V`0}wH!rbp2>u1CbVmI&{OiAPeta#F^J^e@fLwhEiNPV#OH?xJ1>ggd6@9* zDeE2~rj}Jc1IlOhP;V2?0ZUY~IG+#&`d4{KuSq|pAygc9?wXC_epHar{2PHrrZgt9$>bqlMVXm`_u$wlqGaX2IPm#ebT>7I{%vw} zYkON{7z#~}s|ClT^zNSe;Nyp$3;{{sPh^2nEART$+7+>X3;Ai@^@JzgDvy$iqIh&V zW4&Q>?avyu<4i64T%5F*V}1}~8^jfJYioa7*q*ID71wB}bm`=O}9 z%0tTf2&PUxQCKLiMM-Rp;lpyvCaN0#tCKcavtFH=WmX(Ilu8RzTM_x?mSo(BZzBo%&emr?z|O7ED;claw9P3=X2w3FKS*Z z5s74?A=lxox<+73CCp_hz7wCGu?gF(Dd+s+6Q8%8>(oG{!}#_ie?1)nUNK`?x5{+; zF9V6W9~mj{QGaC5tn7w02Dh9v5e?Ui4ODy(6+uN5$$Q!4^J7U6$(8?J|C@F{UN+5F zO}yeXO1U2tZxN{YN)~<1brwVil(6^odj+EI*Q~~g5eASBj;{2%%r85QHYyD}7}R zY|PueXR=1Hcc&P~)T`c6Q#4c(DI{Ksd+A3{*v-7bDkkSx7JG>*3hQ4v5C@lI z#!XJmXb^r$Fms^f^5li4kQrOH6}*nMD!#Au9{5pG*2Zj(G$moQO@EnWgM;-4wn%X&i083>@AF)`MK*p zNnaB)et0YwLF=4C)?(*3(pTwRzur!QG8;wT>j5M(5{r=8HM=VW+EdAh@ekWZm`9c= z-1%PbM=26{JYA&MN3U|H!_z5r=$ELJM$s(^(+W>;6Gu7*6B0b~+~&3Xx6#aezVPBu zeP59Cc2WyrxW~cU=AoDg$o^^i>Q&I^Dj&NG*>CZ$;(GH0i#{Yj%mO|WTgGqoRJ<@*$sFTU z{18|1m7c$(w%^@Ll8KwJwtjCb`(Vb4nEtSGaB*zc8kxSeX2+)G$(cqkC5}@V^h!Tbwxz{j(2g-8 z)_=6YdZmQ#iC`~L=PJuhz%#=Z7)w^6b<*)Luk@G9zRU5W(PhqrXUL&tfBZrYalG3{ zY{5s%(7EqO-}sH<^vu7qWSI4cAjMlRNC6)a^}W|`o*^)jijT0{SuDBdDjHJRgP#6O;i^TkVRa^k-^u5O4+ z+DH@%bpcefn1+Z-daRXYA1qO$Cbbb28JjrmH81f~>L&T~oXXqH-oN}njRir#pqNAI zFR2>H3?|obJNN}c@ZGEB-6M%Ms#Z!z#^b)_)H~qh%R-YTAT*(z*$TxXpP~L>&w*^L z$SSF|PlU`*#fmQi4Lgg4qeQ!oe()cBO;&unS7(5)7S##cZ(JIWx+QdZO zXBal|$9@~M%in{s$hc1V;?<&5{N!Jif0!)6FF!KLTD7brKxoR-?c*9^cd{d`dC$co z9*=7T8#0l>YX5@IVvK7Lw^-tP7%QY%@6ykMJcTXJZ4m2vFpF54WV!mn(Y@YJ8YQ)v ziQ|0IKK3n7jyB#dfe4jHm8(KsOeoTUtw5K2q{OuXcU5S}7rXY{HcaB8l@B98PB@$J z9(T?RQWP0-XguN8lv7j=GaG>)E!%_n(rX*MtvGW|(f+v4!v!Rk-cyT=WZ#k*DF;*q0G48I(d48!c72s|Ij45M$^xiFRkEa8 zMkmJ>-A7kiXWmxY<>2E40lZ24B}t`4# zc~d~ahPb2WK(yYDpD8)0dm-l&4?)~?wq2aqOPWf{Z&*=M*o(6{C8-X)NYYVFWrIE( z#jC-XPe<$X^4CcLe3dN?2s`VR)6Orr_e>b6*t(-EETfk>*1gorH0QWrP8?q6@5Kvj 
zga)OL1J-Q%3yVS*gu}&@iSspx+0n)!5V6}mn0h-PumUWLI4Nw-1AtBSudB){KtDviXH&E={8vUitDdiOX(Me~9h*SPOHMyruL-QLAI z=eqoGH1jLWuw2lGnr*b5pubfgqEt73T}k zx$<~e#IirxHMb@!yiOQPkOK66aB)@aa_SO9y%1rdjv8dx83Qe|7!6&wg5Ogl@?X?N z&teE{*ytznPdh~|avbfSMXl=p>NlnHW}fhzy`*l}t;ZWqRqkHDq#0Kfe@w&QOjW)< zb){>k(Tlp*PkUV6mfR^>b6!fm^jCUF4ij>gpi%I366Jw!#Khqn2^EE4ERcCy3Tp)9 z-L`DjUUQ}^lUukj9{2aMH-UdSrn@xW+HC{`^ROs zRct*X6Y6MQP79DBwk46tDz+_=+XHX3V654~LSJ5l9>sx`0K(-DevU|KXEAflKAKiK zI#xy_Aw(xb{5&GtuJudK*uYkb_pDQ1@!KDER9dNZS?8NgsZ7d4Dq1O>Sto8G=6Z3x zkgUGO0sGCY{%Vh3hs`&t}_z6oQVeD7E-Bs^e!_1%$o zO_`;oh2oA?cf5rf#yaM+YHpU6Ze_S_M)b93*5{SW`?#9Ep}pTO5(z5q$Ib%zDmfPm zePeQ@#>6HzO=_mZ!4y^gw``xaX+~$XruVINq6dPzn!&0iV9nS%XXFob;hGL z3;iPg)w-zr*w@pEmb-gFxGgky>|Yvqqk&~z?ILOBu{7Xry_RyDE&!JlcXb*l;`Af~9$%Uk)>&W8GB_^6pr*pNKSo&*`O# zX3oXXcAQY29d{q)K4i+fpSoOcw1wn=!(eN}yR9_lpoQv=!@z#k;GtAueodF6%rnXm zJDTKWv}!+-)B9=Veztug7l8yV8COSve622m&cQ~d*be8HU0N&>EmSs~pAt5e+R7t{ zn%5hZQ@z3Sm&%Hrk5T41LY_iKloizkmQG@h+5@mws&3BXZ@f`%Tn4XwZA2=4JH+{F zwVlgaNDnxBSzD;jIS)P=;ynFm=k#s5RHs(^lDSms+~vWZ(sleo9< zI@cSGMoBApqYe!g+o$msN!7nPMV`eOi?xz>bH~o|)A2(XRLoMsed57uj$SnQfj`C6DX85<_kHII5>=`#PG65$eDu!N-FSNbSs0@hc zU_q@v{YpLI^8wY#NlVV>dz~`@bUviHXuo<91J{Pl%phP_Dxc7P3!R^WHNKZ_XF9`TvY9nLMr(CD-w#IrdxCjTMrd@;4kO`DUtN` z>eDYWXPY)dI$}ig;;F$!#$5rM7vB=UP9%Jxoi8+~{=9NT^BRBk*5BzEuevUIMy4)# zuA{;`W4XY1=eh5_21*D)IQ64RDJRp$vc=S(0JD*jjs4FVO-p*8mo#5iQ9o_xoEln5 zg=7V2KS*C>TTt~9VrHcz(+r$4$j}p0$hH_}O?vFHyhJwJrSeFyI>OJ_!XqtYtg6hv zF6pZt0H2pmc;=dFis1JP5^d9xtp-(1sul@MoeZ+rB&qjEC>QxA72WqKFiDel_r=O0 z##L+D;y~uP=J`MAKV0jn6c)4D>(%gE@tB}1OfF-rJhvglczouyA&WOe`buN5MK#^5 ztEpf7J6Up2XO~!QL~_%n6H`c0=$MnNa(hT z>C0DRuTtNp5!>*$o-$5lTcqhdtPSS-$A0U?5EYqY;)#pPndTC_sP#6h>d zqe>eemT~&7{sZpTCY6}E3hJC41)s-9vEPG@(74r}JOVc}^qL5=c0xmZ!NViZ%o>5u z+Dp_t`7eBGxif>=q=0$X86JjD9nKoL8_f(o+3?dWWVmKFzQ)I~ zcK0Ho!U`%+2|xsgLFaZ~69ArFsqYWTnkrijRKT|ld#@jkPoj+CL zpWR5$qotcYr<_JQa!cP1yV-33UeCV?9J{6G1cSKgMqtwv0knb}%&+j@KcV|p{zWT<)_nV0#TyvNgmY1h<=~}S zGcP7sr?!O%rT#djfj?Qziu3yjb!{Yu3>Eyrg3b4}=xJu@;IMmU#!vT1)Uh5jH|_1$ 
zE+_pTY6VB*t^QSlp*4bmIuT=Iid;o?$Y| zu@WxpukuW+yTxMeZCdkBCg(f~XiXP*8^pU+x>D_ATA_2=%W`xSczny`>mOXmD}TN7 z6sDDZ<2BCda?fwLQLrAiae#S1_xgIdvN!fOO8Fnz@e^1;d!XD{w`~@{d^>Z&7M1>s zJw6Xz{gYgEfMcgx_D|zcD<>4R=_hT-;k9jQD@JQgrPl4w_87z-U2Lxwqazc@M8#=&&2)IB2=t-6Ugo5Td z*=p1-Z|FsCv(qG?R@uA;)0-{?3|*Sv0xpVBJ_vXZNCU=C1|LA%H!HK1L$ePc<@wKP zfbtXkm(FHj5g-59>l2U-1>pjVUgEXJi^7I9`kk<%OjHAThq>}+5c%RnQ@&Y4KE;fC z`GCuD>a$jZLegcpi55&5QnUp4E!mIXf;fT2aS#qyCod$qPm&wZ(HR5QM}Z*;#Kt|w@^z7SH?wD-n509qVE`XHP!51I0QD?j zyL)dEezvFJGhO8sP&tep0wmpnAK!Mar#u$z0o2z(Yn1xEi+X+io%&~&iTwd}Nr*9769%k3rsKJ#jHjI59mJhD3Afrl z&PNw+fV&|&7w|Swz6|&5V7flw`BXMjs_SR>%w`d7(Vg!Ly>^#-L!?)17p42JkN=eEWdM zK2UAF+ftOSi-@Do4CvIQE(7y3EATKs3+)3RFO*_zWa_2vZf}|_Ro43_x#(itzBQqo z|M)u_Kfc>d5Quq%(C0B~XoyPsv&S_mN;n~O@N3`8S2`xB+4UZibtgf_6_v9hCS+ne7wE9 z1J=OZ`|S*5z4uarKc;oAtJeqAb;Nq43~CZV24C+W0*o#r&Ee+nfjvUXWxrn(2^mxr z;1rca%JIuKALRA&O{OVp%Ee4w(&?_6opx{WQrJMLZ`lRT?z8WKWLd7It`@uZtCGM{#c(sTa52uVa8-v6H{Xu3Z2W4EpFK&zXt;sGH-G>BS4z@rA=!|#2R$o(Iyn3TDS7F(2 zL@<7T_=D*?QD|3Cu*^AgV_C=ww#lj|uCE#LCRV8{{-2SYRYY^a_-K+nFk zS;hwdzI1`j00l#_lW(;>mS5foCb%?+jD#0z1bPR6Hh~u&Vpbe&ve>KJ}}~ z4y1RXg_olKfqu}t(=>^%X+fmNz4W(|MyB_hC)rFweqc*+}uZw+_Ad_$m93=U`FK93?@ zl|*(3Nce+l=2ejiT<4Ap!~qv27&u;8huX>pCH-8NvUlV%d;-yxODeOT1s6~11z`H> z^yDw7xgZu;up7NG_3`|pGK;KgqKT#JFWP6=_%Y*`Jl_|$A=H#_MhGD=d`6O@g=EEx?!_Td6IUeyhaw7 z={Bl}eHe(=yO3DlibK^lg1;nu*Bnnb@#9S((cbbz5{oM@d6#BqgMw%OnULKQSVJ|Y74FN?k&T$(m9Uu`;H znx1fPOC3jQNY~XoDhh~a>ofJxQvswnPaV&o#Ez_f5bht~Hta$D^dJxq8ve^P4u}h( zD^sskjNnH5JeG~~LpOL*`lRt4vS>6I1Zmhmlk(yOe3*e%u~eXvofi|qqzG!;->nO? 
zy#ke%!hl5Aq5vr6-x#L3(CjgwCKLD#T6P zaJVD&dhZcX9s?R^fVcLr|D4Fm9(WEgEr)K(0eUyrrYnGH0W|wFz?}&g7>@zjX#hha zxE>0GJE1t(C8Nt?BAX;b)p4HQKsL3?flXmPz(Xd`L)tYs(9Q4(Ss3M9z#qe{cXE*4 zHru}eW%uV96q4rmwNll3 zN|n}#kwG3Z(H?nG7fXeb1*@TgO`$3wNc`b*KULT$q;vL3Y(j(`$*ncnSc#9j!&t~F z_T42563mnM5uY@<2K}yw$L4Y-w}#g$JNA1X#MtI5qd@WB>P`>lh5xx;yXmlSq$dL=V;{nrQ;BKXi^ddePZUJv+Kx!r`1K=wKD%Eb*v=A65FXWGbnkkU|{w+uqPC6r?Tkdh7 zOo$^As7VE4%}pBs@&`Z|o&b*^6ChdP^n@BwXzm`{33W4(P`VJ{mF-G z(TxY5hI;^y&zh`=pzb*^UIz&0ef|M}Q=oiFz|(dusFr@SunXNh@HgNafYz|)dp>cC zOI-$ppR8y4G$|9%tr=bA{XYmKH&4LQ7`^y2_^z6bFyeoxqM?BcAjX!=8v*fl!Zsb1 zpZ+EBmuJu>3qr~(n0yM5_}&CoqXoN=gZU6$gR8$rM|`qZ*C0}f6)Oj88QMTb96>Rr8ZcLySUh2&YSQi$Clj0#~d_CY2v;XPObc{)(HJ_hn5O2_~{LW+_B zwtXOe3^*$Qn9M`|I|cet&>w90AJl;90$}CS$n7S12Cy0fykLbBC-oSp*$mKw&j}t( zFQ03vYn-f9vofgq0BG_04v6{-Y>YMwxsKhFF|e_TH{~V~rsE!6OGG<8_x=T-5SJc0 zc+$s0UAjV`*(>nz;OKn|xF`dgeUG3}(|fEl)A!-2*QR5@A|p_K3*Y~MUhM6Fy+3OK zAnR*WD!{bie*id7EE70_vEw3W!7BGasn4dz03Qd?dI+#j!Vmr9I}m(_44>2|0PGE) zJzN|!|H+L>36xI+XlC9AocF*j+7Q0vu=H!GLtxSFU?02&w5Fdtd0kLX=2sRX-pdwl zz4!<1VESiMI#l+3&<4@gnrssi~$_9v2 zC{bMc?*a5Di@*u$6JVeP18HANaSsR&BOeZvw>C@h(``xSK^b}4+8=gLwJvVelSe9b z6+Z&X9kq*AY$-OnnVzoCXEGgn*l#0XkZ-*S>RkyQz{i zk4HVs#)zOm$i*+gYtz}4j{qRlhybrfZa=vIsVt@_ZY=g){$J%+hdTp}PPI4IFL=58 z-G@@2%pO*FX^}TR7MW~A`Hqc+zWE~-=rUphb<`+AUR8eg3*M8RTGfBioTnZ8%Ti88 zLhuHxG>H5f!(usZlb;VdJ^JGC*fR19??Q5?t1457H(qZR1h7ejO|ZS?zd)uj0VR!S*8g0xItwJC8|H<^xhHCW8n2rv`h{Thc1IBf2$(14UsamS|=Ct@^%G4HaT`x%D=zXhO@ejl8 zY5)61+TbE@!E#QbBncl$;z7X^sL7t<3hBOBz3^Gqw8Xvoao10-o4j-J8TmPkgPGfP zb!$^zJ;mk4CEM^Iq4A=c^0=eS+*<{wb^NFqO63xF&d*h2$jw9P()3!h`aRsd9h(Nn zb7N0GXwrsfbl)TQf1X5du}KcY06cIh+Xl76!0E~-EX6+BI{5e}W1$Q7&96mESYVqN0H8#!;`-1` z`X2=~1{w~bz=yiJ=g1!{Fc9q>eKP|r9^L>`KP7ia02eug{OW0!wkzdwQ~|+8FtF9q z;u2`Cu`dq6Ou>cEX9tfvUyI!Ly1UJuY9oBeIt&Yp=g|J0Cm_ zYU@4+FY@U;!y*15W-gPA3PI*GFES&R!6@DIyR_!?uPPl?43W2cm~T%27ej5UKWO zkg6Z-BI#-qEAvdW6Co5u1_XR@0yRu;n~VWSkcP1bFdrLWD3$`ey1al_cZ;RPbHHJL 
z3iwq03O21)?=lF;>onujYqe9xB#v_AuNSrz~4bmv1CKf}p)ya&{v^PRY1WnX^fbKw6CPTYwvz~0;ne68=nIK5_g;4 zg@7xv!q4D8!+Ls)&%3a}TODgxzDD4%|WvTW{O(rc|G0A&)_ZSVn+thnF=J5r~Ee#g@Ao!?jo=c zY1r(9@wEp3t9S5%+4SEyOfLcRUGNJE4)%rc)&28bfQ#&(Wk72NFr5PEHeRY)zRxav zCQ?hRgn*EZ>=b|ty?&s%MI5*GYrKhnJQNZBu96ls#iVfsVAyeP`j{`nk;iekXA4ShXHvO zcVS+>L_v)B2W{$t@B{NslZJ2aOoOaM(E`671(PfYob+}dhvbD56(08aF!b|$bL{W8 zG`gfQp{I(goSg_V8uT7PuOet)k1pIFzR?}Ma(rJ3t~P)4erq+O@v{97R0rRa8?e?TKYrD9p(}H48aQ>nk_?S`+A`BxCmJYz@-*^1 zpB($UgOI<(*2^}u&wH+2Rf9<#=^w%tm*DgzwgOY;h2-C*7dQX$;1rl@?R;JDR3mOIBY~FB@ z1Z;+l{oPi{#oL8H1_Gd47iTU5%XUIkTJRQ7CoW1OVulISajq^|l3ZMApiogh{LYD&|A~_n?ZNs1Ao7!O1Fw@(;)AT3>m$VN2t$uGueRu{{3}m)7^d zA6Y(f-Rs3-+oRI4asPxX&LDOS+R&EH;%qK)coft~tH%HE*%!0wo0d`d?PLqhY$5(1 z%|Eu+zoPwfqi{PO1M{oy)-08Txe6}d5oRH}S@F`DjvqUA;%gl8lO7!g7ssd!QnsLE zYNZSg&=P`oEfL2m5hn$acfRBG_uKi! zg24X_KWo9crCY-_l+8Lo;v>3hhJYud?GK5Eg(XBbGb%MRzP?)G+zk^`n$?s8;j;^B zutA5bFMn&vey_x$A-@ND%P^JJTeq-7X&GjQ4oT0K>c;Hwmz46}CLq2p$|3Hj)Hdx+ zHA$%!Y@WzLF^@neF=;I3#x2U3X&?bl6ocwQRclzwF6G*z_6Y@c9G4iQK21sV8#T6X zO>+=by%cI3*2;jtAGW;7$JzYS>#rhzvzN`8mJRlI@HDkdutA?V$R?|`h@r$%g_E<0 znxBGBm_Acc^bMD27a?`_*d<=0ZNSK#QDYrt{(`58`Z%E#U$FWglGu0X7UpaM57h5m zBQBnvkY8i;5Dy*;!qg3XQJ`akckO8%h-{}gA=iCf>3 z`qks@J$`}IzDcbhx&CT1UK6&D<-gs~Y^DXByMw*CvZxv zeavQg2AXZ4NTk#j^QGddTYYofz!%9EK_B#9-?|RY`tO?knT`7L0<@r;W&89O-M(~I zRO6)X;99T0Fb}17Lh9>$RLd(WiUmhWgm3)c9k&8|8tlK72U5N_T)u6tV&k;*8>aYi z%qEcKq11QvAeA2~^>hKG1Ad{8zj~TfOd;r9sLeZIB2O)3B$brY-|4YJjEnQbRGA>f zn*%=fg{T$%ceMrkD9c*M<3)8}_R8^BRNooOHXfThprdsDxN*?r)f%w>{2>z3PHeGW z<*d#;7xu2U(shBf0ox+ctzV$c-j=mQh?=|Y*qv?hh=Y5{?M7Mu1wGoC^g3cm{n4Yr zhh&oK7QtD&Po0eOxDKs^-xfsr!ofI~nMy7jDRLj>Sv9cIcAhvQ2M{fg_M$mgs@cdj z*4qzDg;H%Q-Zgyrf~-pUq_U-RzeO4i`UQOo2DEF$A%|GhJ)HN!Dme|KC(SwKj>D&1h zq{~ENhA_eKgt)pb1Ge%#;@LJFzDAY2A^7r{gg6gba&oODyf3K#P|=RjJKjO9g3LT9vfX#cL3ueZ(OEi%Hd^y=+(h}+ATC0ul#+(#}|ZQNPhp7#&5 zsJHAI>*aOl^38yQ!CPo{%MajUN2|Ht=@fV*BH;50eXhC#P8%EG>k-w5_aHbEjkoRt z_c#~ek(wlQJ>w2~oM#lvIGGDkDF6t z+Nh?vD#-B3h`gv>(oAHn`J2=~;i(-U)#TMMC45lM_99e}CZdV39!3yl-a` 
zg>m}&cH#>8j|9rQ(SZfTPXNRMx|ntdbWy-g0^dPfok^A8<}DFm9Th|itEn+KhZd>H z?ttm1U`=>WKXq^L`9i$3@qPvKMX~rPLhmCV)X#yycI9&*K;I~HYkcg1o#LU4$=)_K=i*WR|*HTxq9ViKx`2p z9tNB&5HwDXJLc)@dc96JMsInm8ZH>~?wJ$_Qn(4`<3iq_v2nDYCF!yj_!CP~IX&3X ze|SluUhey@r@J+fwz|`fmLZs<7BDX>niUYIr^F|=Ney`+NdrbE{i)eds+M#UNzrm9 z`Cug}TPi18D?eaMED%ZW6=0Hl&0CBUyKax^yK`7!*ugdx8FJc->qp@%-m1$pK3 zy^2s1Iz_1-6{87V)W`{G12A$6WOwfUBbVs`(DCs+z!wLs{W^As>-Z(;4*;ay0aFWq zK|1v?Wtb`dslzj{`VqEy{9_LS-h?+3+_k_Wb*J9X`Te-o4%B8b5Zd>zR{|;X-Y^_1 z?>mBV)xampv{b59ZA>tX{1;vZvW#uL=vDc9I0c~sg8LA9z9?X&6Tb5D1WzV$m_zf; zLj!#2;64#(IR_Yj70H3F9|^Bae?2Dur?W~NI=?LX3rl@yKmb0S>EZ#+*3iWZQs8+D zCbh|h^BkwI)j*@?Z$;^+tiNf`JKWw^Y2XKfET8C}gNVEd2%c+N8P{SkKuLmVbh4}mRjbay; zBwS;cx)mDT{oVD^Tl@(djD?XEtrz}`AIIJeAd5ngXNBClGyH43HbbE<3J`z_?!pQ{ zHb2?ygK9Q1{`q(Xqd?b{k#_r)k;8uCp+P0AGz5(j&>jMy{?`D$vD2qmR3N@)p+M66 zFJqSvA7O7Y*FPSZ;q`vKZtu-H=o*pMq6h&4Xi_DQLlMsd?JpDxx*??eH@C}=wF6x14 zQ*Hh}K_iOX(80x@FRwdmuGt>gK)wq3Pj4l`?As;r_<`SmGdBPwKub&^r0jjXd6^A2 zdMbr|m{>yB3zy}OTxVYOgy?5BtoQtH5K)gWw#{EyYie8vH7^hRR&#D}@tvKjPueso zEQpiDqNSJ$7V92ZFf!^Vmh)~fnF%C*;0u5e?|?htk^Tn!XpI7}$Pqke@4PHe+k(w< ziLReR9$d~E8=8RE2BOeK=aGkohFRDROTQZ3LzzjBEz`IBKbzm7dLGyG&mg#;8VPj? zvW84MN^~qQ2^(rb!E}%!io4ruDR@|arQ8)QvI9l-#f&a7Cn<)j-YP1N zuIn1b2_D=Xg1ZJMSdd@|5+Jw*cZUwa-GT=vNN{%=3-0dj?%H&ne%|kl|E9*Mo9gP? 
zd(T>X%BnbRUh)#iz{rk72D~m&;?T$c zCCv$=%X#bNL?yyi_>HVH|3xmrR}LZ;O5pf$C*g5F82do*)sP0J8T=n(3(W6rjCt4X778BnIz4O<2DJCf(SJ~FYCA7H*f@t!b3nFpS) zs+c|fM1>uDB7OjJ`b!GmJ028ur9ZIzYD9k8x!bYG@Z=lJM~7CVj{|oIyyo`^1YGIA z(mvw+F-u#%zdc&TE>Fh_Mq!Y}l=SCHiV#J-C+uAH^^81P5!d-{w4!hYuSgrB4TdJOOP=^v+W-9oH zp;Y+LUHzZ#&zC-cysAn4)@U2c@XuH7qsHT-wsF6|G8-|a8}>yx%{Nj_6wC-zXD0DV zQZic8C54}*b6yp*xG@BhT24N>yYlZ7?ztmEv>2&g4_1Gs9GOj!c;GT?p|2M{gpN@a zRnrfEd2WD6cgM~N?i9Cp-lBrE0gVy^x~I9vhK)%uJw17#c`!F;`D0z zN852?dkRRnZ}GD5Lr?$OAyeD_U)jlHyW^d`m85lsFV#*gEO*@mY}0hGoRpK0D6 zPI~Yq3G%fD1-_JIatCTRZT(sfyc*aD00H+EFJ3=~Ff!nKOrob_$gs<}X9e!AypPxd zxPygsPc*5^P8`}g<2r8?k43IF_tOy#%o>}6tE6mPbB;R&vRjFYFIYiWfqFZNlOdXs zwf_PX_kBwvS5v^542lbSpAyA46J&bH*IS8^3x`mUZ*t?hB1hek<`Ii*J`WzVs~B<% zr)Al*_c#;r^>zq1R@8j9OtMG08AH{WJo^$6r-^+%NBZhs=TG4BXYVt^Z@JHAMa0wl zC~hr<82B7HBvh0(2TUJRkSSX{vg`CReEzaTqY~az@1W<0xuC@h@hqZZiI*JYRAqfv z`)zL|yyH?=Sy_^n;%|LguvzhQ>)|l1NocudcCQn)<(Cn+S_pCPam4fQA-zfF*kL1w z`O>k}9FKCGdDXU?BsL^f<#!aBZz^4Lf_TWs@ihHQ67lC6m7%V1LnD(yl)O3?r++8pqs7KJU zvP%El0Oy@yQ>9xZ{(Ellj%>`f>}Tt!L3=qmdTyrHGl@gsH+dr=J9@miEOnQ%82VF; zZuiTveEZ;m^W?()I0t5oC{{rc*Y^iMJrx&|i5REGAo)qTkB#*w+Q?0>Ynr7#}!�rC+noIgrzQ3vLCF49Yw2F+$ z{kDpk!)0wFcAUF^H1D>mh4H!TWk?ZSQbTg{m%?j=El$Em3?KN2I7O{~iIcJw6A><+ zCcVtjiurf0Wp@Rm9J4oq>H5naOghd@1R8{qQt}$h-8$*o5mn5L+3XlSPelR6H)BEx z=xAEyX);zV=L+&w-F0X2zG3J2`msdyP)W360jewP< zu(h|)-4v!y;AOc`4Th}j8*)+Jx2JJU&?b+?g=`>Y4W^U1A7>ZjPwNj}oa(b%f2FFLLUApe^AocRyR{J=zd;AfSHQL( zbO(Vtk^-JWk81V~Sin=!0J{*I5R&5VL%|lfp|=wom^*oT1pcl99s0mUa*|+R<@#F> zbvBcA6{@;8y!tU+G@8^tV~hK7IjN$4$~$;U5sBUynUm~vOCmT1)uG;CktbuyHkb(pL7O z>K>P~mfgTaQ6(#{8Pb>0nUQP6u8jYvtawDVmfOQ^Ehb{HxWv=l+1(sz*q4JQD=WaG zh)ck9S)&JzJ39|J9_p0XgW=l*+ zhEHx!w79Ua^{>ekc^aLGKnVeaTBn1)Jhwp_e-l);#0Mn^(^U?P*~5QBp#*s#yV}^Y zd{i6z2YmlN4FD^BAadzEeu#bx;281E4%th|p-NQ6zG_4_efg8hw4Zs`B=LHw;>aAl zI;~zM_LUsY1L_QIe`@qA+HO@lL^9gbnr2O2V<9ToiZN%jDte9B z+7uErCb=-H8zN-k*8!-L1S-0S3spu!iYK|e+mwWrGml)b1lbzQD-K46q^8y5Au3@q zb437+aez4_F13vTFX{23QxJI(Kw1yR`Q?@?b50nd;L#o!c!3_uC?7O(T 
zot31AAHM$)@jUt3YSHbN6RsWCHwYmdXqX;8t6@slFI(Pu7DQsQh##h(&U*(j=5Y24 z5>6JQTnMwKNUmWa1EEvK(gg~&<#V{Mv)RncDeGQ^ms}<6$-Phizt3giU31>L4X9tk5AyBG6T-kYuqmRlut<}U33_-Du}DntfZ)?h%0_!`-xsJ z)TGU@c1gv*$jR{P^M5^iwYEt*Q~M!M>#naBN3in{q)2;7(PgSzVU`4va$#CYQ#`+Y z_NZAz3I!do{n6ZDTwgWed7Cz5wY%Nu!tZ7r&h^pz3+otBH`3ZagN1wB#;b#$O?q%> zxrQ_bFoy5qysmRGZF2}1>KcJ|^5vYzS-Iv=#IHJhY~PaM$v#0@Bg@#JozP3us`%2Q z!IIn~Wua@pa^A^pnzqB>z^Y2HXvgol7+@Ck4^Q+U!WgL^rIAj>Jw`}%Q~oc>`d>|l z^4#^|^~U_OIy)a1W}&7JPqClh^?;3uAFB9OCN9JzHbw;f+EUy+7+ zv+!@S{vRjEZXQ7szFRwfUj#QAE_N0IgxZh9UR1BxhK(JTdN1XonfMCn5hLdc;$1|_ zeglQp4j~#lx02t4zT!>Lq^5)v;=jxlj9*}!WG$u}G7`h=xCEXqOUBi8?_DP_Z-n<` zz51yM`ie7AQ?kMXg|bqFO$=M$#py2c#N2Xzk0EYPwK!rf@{?-T4k&agZS$7dVSo7{ zk^3hp-aA}T4hk=pHg-3SY1wB@4eh2+8vmjGgGIf!=@fY-Zue2j1K#3(G{?_!tgzQT z8l8$^Y4#$U&?x}UJ;cg{!@!O44h>~EZ?Mwt*&2%-Q&I}$Fd&iz;rQ(URyR4=+!7+G!HN>)T$F%Nvj1&R{xF542w6fQ`Q%gef9CC5UbP=#1?#@=zhN_>?$IY z(Y#BlklEjhr?j+*=1a=TFwZQ=h6q1Np{0FqJRYW1`6ZDSwRhMP|BRUtaVPxVEy+G= zha6!e8^PaeNFA5(kY65USN3htP&n?O*@QE}Tb(X6*)ZdSW04iJ%8RF2DyOg9u(2mJkdUri2XzQ@jpGJ} zi^Fufr!PSHWs|aoLstYfrX%yQ!+R-<06P75`hWWbQOZpr-n?SFZ_oSp&T4LzHm^l4 zdsv;>ObVBRoSNgNyZ2>TP8|i4++>m0mtFt$G0ThWnMB`LOXSx&`xKTGG-jnRYV(`s<8VVqPp<{xpoFl8%B@Y|HW4Rs+endp0PD%Q+z zM9{Ziab7h7*-IcetakX#2DR}Y39awFe&)=l%Bn287?k16N`y#aMQ2VQ8|IUfJ-fT00el-u}tDY z>D7?9Q=3!~>@Itq9UD?H=g}^|A6GCYT=>q)wHMr0&!~r2*GvqN(Ll|2_bE=_?sRLq zJv4KESt4Jo^j?S&p!-FPQ9fNPpDd-%)3Za2k3 zjjz(hS}Ai18>bX?*=>e3beUxMqij${tMz)0+SWKNxVKkMnzN|p@UI(q$cL4%ICHM> z;{)DY#H?t^6y6x8GRZTq`p=Ye)XJdmWY%S2K-l4eMioePx(Bf= z9$Q=-``tIfU?57^gQ+(H z9=&>}Z@GonZd&%6U=}gxgUdCzj_--uCs_&Z^6xJ2@buFJ9-N;m zoD?h?@u~wmKmeG9YQ9@AjZCc$m_HdSss}oWUaD4so1YM)zovnumH2M&6Yv`+nD3{p*PD!iPtSVcKw{m_)kBB@1vkj3&Omkgm*3 zI~_Pnj>vy&viPlUHn3l1%aB@SesiIGeHC9G&FiE30Ad;tBG&z^J*n2`AAV2jXU3!b zpV9Vz>U<6(@YOCP9tdj-5J~~C8j>0SL4H^iCCk9bngHBg`XyWQC;uEyzK&I@aQ@#(;6Jzrd;T zAj_1?%Um$dQ3z>Bg3l83UQES_o*_h=J^cCOI~1{IfoKkd4wh`0HI!XJ?P`E$0*Y%+ac@H=r`&g(KK zADrf9JszsZC+P9Xj=aupGDu+fwlOCYhSYa{p8GvyZ{tJQBawRnVgn6o`F6+SRC)Ms9r 
zShB%aF3h^f9VDumyld@sSPK1a%rHSP;UEb|OL}4U>8yM*aKa7aeZJmM;cUvE*;*Fz z5lz8bmX#6B#yS?9)IVb7AH!MhBwL^lScf3kw(uA*|XOWH&<>zBlYX=hOp= zP_8o=ODzG5KfvEKg0W6PdS#{i_rTB%wTCsJk4wQBjh!0vQJVyMh7t24%_J@HWS!1( z0@X|qL%;#on%<4iQZ<=ww?7eyJ;R6>InMX31o4)@ZJq^+gu|T?H7{0$n{#`8!Wa@?Fup?u5P=AFhW8I=OPY8E#!Pk%9e7P)j2E&rqOlQ=%XDh@=a1QQoXvO&+cg4gr2JxAPKFJsz`% z*fb$ypeLdHH^MVkU7wb&ySL^$ZCb_mN>x_fjzkLf_nbX>KUvX9cyh3CCsQM(jS%3s zdbwYaNZ8M-&xg-f_1Dmn?WHmPXV(9ZzULSM9t{{H9n9e=5%6?9p=CuDFkR%*0Oq%k z2h?g-_OQ$ia?8kC@4n9zLu_ySWx@PMHc z-(HX(es+#NlA$f|-D(14&#(7CkC^rBXs zW$k)I%*u6jJ**mf()G&7^3B%r@KBf1D+Etx+~x z%(|az;qFrjHMN>L{)?v++1q&+4-YAV*C0&T5ui%GHh$WkWm*-CES~h=jv?~@VG^mE zakQFD>894EcZ@A1le#8zn<(0K-w0GN{v2GH-VDk?I_)NX*iE9yd^Y8HUm z?4NUBp(LoIDgPp^MV?WT(xwQoAhkgf4%xGo1!86xqcgWD#c5Cn z29s9LWla94LChCQVuwew&epVahef}bNha5TFG*1tx-Pt?OG8$Iu286$Po6_7q6}F; zPsu|59jVQ0nwbqk2TAEFf;#+Uf!3T6F<7qBa2% z+nKIOEHMR=Fad-~riBjH7ui7EeA76T6fjbZ^ZnBNbpdXL|wV_W@A*dn8rjsQ}M4W4IYac#%%~!6u(X3I) zgQp3wKCApR-lv|k34O3$W2+RZBt--l!K1O=$$YZcmaRhx^%ioX{EKw1Vr3;PbM+if z*1cMA_{?XUtn&_whoSIa5GapK=Qfag^CO)>Uw?mQEn#I~V8Ul<=WE~y**E;zYda@m zNOjjkstVmU6){g;>DyGIT2^U}-#qpJM#GQO+?uJ_mvlzy%Rz-6waz1BV{L0_TUqTd zSXfW-Khmn*5E&ZfQ5%fH3y-p}1ZoIP5eLdj6@d%t38<*+H_@y9v%7y-Z8u05=T&(E z*T3?YE_b2_&_3o*X$dZWC^(@PYL##)VDlB1cad1uTM{>XIb`A+t*mQ#_sPM=Qb;#H z`n>pB#Y}z;<;AXcLi@>epU^Pme9v&!9nJR7NOYR%gkN388`JNL>U~ryHk$OG_T`a` zuzR>{L?QE&cqwx8HQXp77`4k418QCzxBrPaytBKDjs4?}ddqxb>+>jC2m*#YPD9NI zAq&1zw%cCt|I9>Jx7Ez!~foMAa8%6M@bYz%NsN(wf(1&0g!bg%r3rl z2{QkTz2QCJ^oFjIwx@T`(Gu&Q2}-e8^COQhgp-$^>p==PWpVJK?~KPJa0REci@X1#oKT;YxuC?FBw02icONUI~~-m~JWB;%D(& zsM!L2UUaR`B}DJ-3T@+^CQIHmbQY4K<&3G@mzx{#(6M%9Y#98Rt+epoWaMwgY$+6t z7d>Q1L29*O(-=90{F`(M`7AW!STgz{SwetcY?%B3A#i!PUh2yh@A`*OoK>dHXV%vE zTD`rB-Ju9c0)hn0ik&O`bMW9h)w#hNR#IQeHYA+gEd zSytJ@bRQY;!m^8J`pPt5{zthk+?Opr&TYyF3AN>~*vGaRVF!IxcG)R5M#~A`dubiT zmr_fuOLQhilIL@^i1OOCKHBT>salMTNuGR?O4pX^&?mJVN;5BNv@`h?XDY-N?HXXm z&U1Z67aA!uhUX>;zp;tWu``>xWk7n+UVSO76>0gQlYHgR@mLVn*9Vc#ob1Dzgu)kd 
zN4d>KT86T|cl_s%VKVs)nU~aii@%A|7dt!63WiDGom|~m%gdd04E8HXKrxtgb}2+= z@t=IziETWBI7d5rMH9hY2hoXXzvLXlr5A6LxlZe%#R*8D2L@rGm)$;vf*Zfd9j`c- z+cH&R$#qhTLyJu@oO+DtF{{`p1^&FMEdzzGYK9wCuUv*N8-1|q+45D=ahRabF8 zYu0%B*>l>M@cmbrFpC2Zi>hm}73;iA?107`8y*Yo(ZcTf8ZFV`X8 zYZlg0K2>d1o}V{$Ci~iIPY|yZyb7qKtI;n7rygq)Asa?o=Y0S7$_t)4_^S3V-~4!| zt-3_%<-gs;e+T;osHY>c3>hUtLqBvuJu~YRN?q|a^&XtH1jxIhcBPQ#-176ack!NH zUh2S^?#nt;@FR+h*NyvNambn+ZH@X zb0iTIe`L3_BQk*CQDner`Lv^P6`8RbLvEzB(&(jA=%!P--JgXIhCeRGmizQJclz9k zNYbbJH!XA0ds(ptD}=Fk4$zx(Lu%d79G=X)78NU$tI9Mn-`E0f`#p3jEwrpax(^R4 z$p$T}7s#D63K`;jcUV(t^3GCUlI=CgSI)bBv=AcAN;8Tkiy*en-A+VTsh~VJ%}DYs zpPIZo@Y(V%&n?pJeV^@>=`ek8o7X}$8kL@T)*YNzrH`V~3;H9J{O|c~?}1dH;*0T{ zsi;pu(cE_l-333+1VRFpN!*T=*hK0)5pQUlVhuw*G=Lme#4kgnCG5W&+5Yr!sIf~S z%uvYw3bxDtc>M=Vv^jwHLln9L#V?$+0m*2#$M3%Vr3&to6U++n+K|7*lw+(jLOH z@P&UgUkdycTBLq|C0=x>EXDFFSqNpilk|Qad^U1x{kr^W7W8P;@@f>*I12;_1999I zUIwdAV{<5-bdhSsxgs?(H~H+3EFK3vSSCSTI4$btLrRxn0La&|ZB96mF>-w)0G;x9 zU{OH+ait5hAry%>>N}pWl=i2b_LeWT6peE%$_|Ps2L3)30Wo#+?{S=rb|jP}hn9R5 zf(f3v8x?8l5+_x3$XHAV7Ghs1MRO_beln0$zLdng`J*7f4{(f0v^tubhP7ng!nxHM zOgjB-x1W+9anQ)U+bw(88FKH!9bA&?CX>zj@a^>4B3+yD* zHZFfv`c1idym$})jNaq?7KxwlwOa*Bj7WK-u4l88i$Y&?sdhg17CNl?Sj>23qndfo zA}-`%5LgkRAYu~RT9PN4wEL!CNxqBMLAhFAi18&+A9=h|kO?+#usv=|@p@1kGHT}i z+>sO-_Ko3}$p#A_T$UN7d?x)^C zB+ulb&;iS)nR!^WT+AkTBbDVv`&LBMBYh{^bVNEA^y$uVH4{tG*@>o3w#$fG<(Tn364+b#tL+L=^EoQa8; zd5Mc~eKDA5KX@IAYl+NNQe(VZXzQK#*ECsGS6s->y?nxq_c=$MkwF|+0X7@@ z4x>Fm;s!(}TEEA!-!R{t1rCI6^O{74Z!NgX@(=cUQ>aMaJ4Zj9i(^7~&11=jN?x|q zrG|7wJ*K=w(t6pcR%-mw5Yi5KxCoPax~N1*wj|Qx{~$y{<;LO*H~xu2H-|KnlAk9*Du5yN%d&ec zfiP0q+H&@v9z0rUv!_}{L#(BWlj*bH3wx!bq=5+5%BY{%A3Auk%;o3S|)o9@Psu*_=9W!wKapN>5PD? 
zenOivvglqNzdr+bYIQA9s7_3%YKm{Br66puWU!ti*TmKaO6FIq1t21^qfx{!?iRFE zTQv!gSE#N7GxiS|dH^Vb3FNXrd@NY|B9PgQPmj<6=N(eNtUuF2+{QPNTs4?PqiU*5 z9Vo09gWlmHEP~ddG9!NNC6<(bA$5n!KW#330~!N>O9=SS6JWawR0uqRn)052rd|L8 z;ic{k+^Pjb%uRnBsyxQ0-r16x*CgrPe9a*#A$qHxP}g=`t79MbNNKlR3i@`fr^udy zDW9xPXMZz+!r|gSVIA-u0!I-H3J@F z3(q>`jvu3`W+i*vk6%`9mpCe0!yB4IgJ{mn!7xAy>>I7o-%b+GN~c@KHJtIbf|>~b zPJjCN;j3wBswDyMy=Wb?hC5=LEQS}hr2LE*wOWYe?v}~t&kT*PpwHe{@K%#DAr_$q z>+r*gFSI=du5#_XD5LOt7MH$9MfW{vi%p}(Ghf>`5Ar|lpagwegSA*$4BzMb?U`2H zjbQbSL0X7U>eG&nyh$Z(CJG!!wv@eZu*()sKSQk56m)!iF^1^|p7VIzPTDCEw8or) zcx3m`dWIq>)+6=VwPUyYIJjJi_H=2cds6WCYhpyH$Uh{aI6vcm+k-uvly+H;b9lj* zQAbhA#Y~m~1i|!N%_|XBH(7zim>wt`n{Rz(8dnaZLxTdE-do>_bU>@c-E<2U&f`uE zo8}G$%*p7wBo>_Q9mA5|>bzq{C|~T}W#a723bPBFSg^b5T{851v8#$h52uFAtqZBP z##pX-)h*aV@@m;-deeTAdlpI7Q;d44{wCjtSRD9LwOs}tkmYXhkBn(Q5qzd*c@^>Y z)6Y^;vinqOH)eSmCQiMhD3YnbtUPv6?qg;+)m+6h(B$%LfBa0e_gYkEN^mg>6!vY< zkrp;%F`vm@his7i3Q(6RtC?zQ{#v!wi>u;Zp@gscb+?SQ`quMPsV}lJfy#yt=a=6| zc`v?jJJ-zk$iMJxBlx|k`1bn7RV zTZp{58DKO1%r+Xzq14>%159|l52|;?d)i zp~%LAE|q8K>*-nL!2j~fp4&L!vCF*0{U{$jY$Y#@d&zrEq9%||CamrC2Cv% z72(G|P!$GTobF=5tqt4&S(Ns0k7iduEd=!61%iR5YyqgGxmnJ?2W=~{+R)yQVF;br z(424gT2Fw8m{4>N;9C*f2^@*;14fBfK1XNDI(Cptdu3kBXfrFkp9&=7j`pplQ<*fe z;`@9DH{|WOsEE|DUKjbsR3dhYG-HDvi!^T}!q`no%dpucH0Uysj;-w z_Evr+o4tlRq8PjEq0LR_4Ym9pp4A-p{kLI7FpnqXLlUAz^AQu(;WW|F-y>EUjdo#r z-YROtxLKR1MdT3)>x+zzN=Th^^+FQ37?q=r9zC|#SOR~{o$NR(kM2j6AxjG$i_LZg zEACp*@P%-sh*z)DCwZ$Oa&>jG)B7m0Dy>R)`Vj0o`sN< zjCqZ6YHs3hqtSmeD}0LWA|ETrNp9so~GtO4^5O&M4f zKu4b|p9wDCX*_9tN~rGF=?qKuJ~o<31^cd?#+^}_m9HeZ^utjIQt&Pk1;uWHI9Aeq zbak3fQ;%l~I**MUlS!toCTblU_bi_zz-@3gjnST|)O4fG6GXF6F;f{8_85WaHnb?F zT&y0D7vXjPvc#e1)T(pt-PGz0QU9AxS$t&lcjs0QeEWSA%j}q;u7{w(Q|87OKSI47|5M&+VXq{A;o{hgxBjvtKddS%c6a}jqaOIi-0KXS%Ll(F9qPefDGR{ zoeshW*D4{5q`cKTLwm&D1z%ejSpXOHncgp;g0dSBc+jlleY-@>eGJFUs`rKbxQu9% zznZ9f*)??gao3KzFvv(vb>r2~6>DzBY2Y05wXX;d+pG4IesaC$lbl*9( zpGtAT&)kN z*Y&}M2CqlE=u*~d`$7`{xd`x}vjfB&Yy$M6kqzs>@gwM|wrcmk*0g{pxOf88DGU^U 
zju;qXtYnN@GFU*U#xBXSY}>H))UPn?N9k%U=i1Qxb| zit(r`P{jjS3C4It1J)0)3aDyF5;_P02ypj*|4XICmqru4sH(#DtZZzH!JF^w33c*z zclLJn_pv8!)w%j%d{ZN!boD*s9{`g8D}E~=(~l1dZjq+jVq zuEr|=w$4+9T=n*batS0N_f%D|2TrX4#G)V%=^U1UC${91CyBsts=oz9q3YWuP+t!m zoD70Mc*@OgI88#e@D=dbj5P5ep9;%w3LBDBh0Dgz+CmfYO|#{(yj4cPc?EaI+oiik*Vgv$By%EXyKj&>(f ziHi?}=KnoOOe9*9=nxmrPcVfI^i8*{Z5^IoK0hPrS*MVPX>&sw7!mD7pXMJy#lgUX z006>;Zg?xc=HyVjf~;i9AS#(DsD{*lqemtq#3%=N?0f=-?plm5L!JFxyb&=QR+!OI z`fMvsW&r08s;VWRG-yFo!Fj?{w)Q$o{DE5+|Ud&eP=5qD+>&)d( z<_}$|2YZ)fmbR{5nS=VaLwdT0w~%YZbmR4$>>I*gxxQH_WLkRe1sjK5pvmUxi?jKd zHF=rji2(6-A7E?R$JfivRa#R|U+hSTV8ZE)981pc#Do0T9(i7=rU8!N{=3k;%BAyp z=a7`ekY{n%sv!)B#@L6AP@uTt2J9yQsJ$TXNpjqNf2v{z+{q~ahyD6VK>g{xLdx3m zes8`$jDxOWY`Pc%7Ur%bjmCfl5cU(7r_XJPS%>JeTpQ5o`Bde%1d}j*o?v!c2crSN zPwwfW$Oy7%E4D*ed$B+)L*kR6b`$4UON!QkA3Ln9Kl%lp62VjaTa*x7z7Bp zLsMb6vcmNbXgCL0pFjk#b)98-1!;-}=AHqJBcST(O}zwAFafl`0T%M8s}g~o+Nx1t z`j`M#_}l;&A%NrCshZM~%O7dgbdh-pktoMumO(#pr>~KGQo$<2)5@$F=(O)kO^FfT zU)q?rFEI0Fbo`;kW!t9BSX5oJ7W91z54W_fiw{?wS^abR@S2}cuqOCozI%9F=v8$* z^#pQXohK#$_wy!LHMcMdY@B#TJpt~Jfu<2aNDZ+`;!8SK z<<#IMpiwtvbDtxCP*?}C>`GbsydzWn*WJ}MO?!hcbIjn*F0gXxquiTbVaTP!*I%Xe z;(1raaYfW(=t1*^!rXK(DWn?Q7ATsd#WaZ`0C*e#G$BT)KidDpw^)Gr80g4`m30%q zP5lALNDxl;pbO}QZ4Z?XfJZ$D_(t;?U^ka(hsDrEh;|9DK)Ux@5b)fEH58b80xfMP zJARR4W!@j8Nx@!SZ*4~iA@9tiQ;r59+Ed(~@q5b%s^{5iV)jt2nC+F}I~ zo4(mR0BoDU-bm*(U^EJ>+Z{vHVCXgU8N`ypy;I6nd184EGz6aWulRl|g!+r76U<=YWv0qUh3k>aT2haS`f}pO3!3GCC`$@J&;0w z`^z1l@*xCA1n9+pZH}R!WLO7BXqaKs*G=G?JpeS`0uM0onBSV4n#j$jTJvk}VF*vG zkh!WT#>Dla5r4cJ@;!Op&DwSvTy34^ZnH%pVNZA~n95?vfSLDX;HGf^ntY-*3kPNES#Hm^Ko4T*t z@QZxpHrrwC4fzW^FZSgth7RS2rx|07vF(rRRQ}M-Kk_2@eTl+?yRsZ@lB2#fYT1U*_9n&?1c#fV{Sm|zw#Dn+XMKro{j{r zfoTh)A%Ohfi)1Uu7yuRobcpMobjKtykm^Jm{(CKI%O5z_Q_L^hh9 zwhI`^-GFqbxE7J?_koxr;2CnTFazZ5L9F_Uq@KSiki#1i0$fJ_P2H8VqZe=|JQVB< zIcU&za4zS2LmiI5k&mK>DD`SxPxk6@UVq?F0AARz7hS9DSXeqatIeBPro7FJDVm}D z`_Yv?|E$)4-|8y_#X;Qr8}C40Z#ew6*xf zs+^&9PQYgzL)h(py8;^Ul$bn2MLnH_)`d={NFSIx8D*|9om7O0>|?M^EUX}J_%Hr zt#?+h2

a?G%4;dFsAIq<684BfS^<)S~rn=G63gD5I#k>h_6f7c8F+DZT)_5}24k zcMta>deG_X4fH3R3m0cfDS=a4?g$8BiAan zY;T|xeDia{eP`BFrT0E8lw2WHoUA3=HfY^a8;<@bx8P=+uTTmIYxRw}_|t;h`M+~^ zObQf|oj5E}-lDo;X3U~cd@m{-?g&f`pFyaIRqbEbl|)H+@NdNs9c2qeSL=bs^5^V- zf>3`Xm=DxBkNswV#wJ5Xur^OwF=_yEp&Zd4;&ZcjM|U$%S=-g>UeI#Z0?$CV5dN<2o3qy=^=A54T}r@0>CtP6bj$#M`& z^zJ_wNi#F(j}j(fk!y7^RW&-Ue$o{S912II|NVjj5ga^0g^S4(&AQj<16b@?T{V?!$=~*4~G%YCFI+K&$eQjO#aUMGWu?VE;GlVxi4hrB?VI8k#{+!$M^Z9ZkYB_-t_v4 zW_wGYwjk~tuaRHTo{?1rbOEo;MnadsLsMqXEVgpKZ|>W#>jgd@#RuQ5ob~5sza@@E z1%FXa4-z|5RyHjtgmD{~_?tv3WqXMn{(6n!sKZ^OCe|CJilzN22sL_L9~uftQRgTs zO7}$4N`cAyK9&TO2vjSxJzdx*YZqL@Q^=$w(mjN;mmjKGicn+pma5UaY(myY8e+8{ zBv~?yzy$rp4L() zajL(B$>(d<1{2AWP9uLnXrNX!J$`QH_Y+P=^+HaDOL%IuNEG%mpD-qkA6)L2Ky!Nm zmr@7tHS_C)Jz>$c|MRCpC^)E9G$|it)EE*xU+k(W?lRdFa>PS&F1kIP5DshUL_2R# z>l2{Io#IYEQ(@qyc;TJS!4JT>c5xI?_%w28g@B!^GU!2iT0}KFCoq(k(Z~J!`+?v) z=g;#!o(jZP#ISFC)+19CG!HB=EvqRs?@3J9VV_(Ph^2=<6XV{{;hL|9nsblbfnkk` z#H9bsRwEfHh1TO5`7{&b@D`gocK%YZFl7As`3om$-r$$`0@jiK)FtU5)LDn%QG?H( z1ohJhxl0o0hDl#k+eXqy(w;QQ*lWpEv$Yaf3x1muTX8GIOhy{hqF?`+J>dAG{^uh< zd)|10e_lqWUK(5PmDx}?oEk|91Llmcc2do3EukopzoEDWMU0UWt`A$4N zl_ZjsB?aq7u}<9Y{*c!h@7HJ;lIts|GW~08C$k!}#XCI>Z-)H%>$E&%f!}iek%}jB zOupLSR!UaUiJ?3Cb!=ti=pzNGO~eyZ8Hej*A~iKxDH(;D@kY=kJ_`{6`H5;z7tVe{ z(~mr4ja}?^XvLFY|C+x^_Lq`T0S zkBdxgu?lnrhr6|3s0iPVUegT zFWE9Q?f(moCvn(dWqMyL&L8bR)A0hIw#)s$ z{e8>iB2txlQp0&qmf|O)Aq=%>Y%McH!k8q_FCbbT@v}6FFe$et_2NX=DobQR-hnyD zX~TOMP9yt)1h}#f0yfO{=j>vrU&TIyubH));X0)Q&(R$dy=Lmfb8is99J^60Rl{g9 zkB7mXuWT{ge&wdw%ozU_Hrg^|=37`TE`cfSAj!%R%*x;jWh$A9Vgua}7b>cGS|n>zkW$A7Je|0;dEG!|?{{8z8h%;*1UH99)}>t*7< z{@czx3U>yszr*}nW44GVfnWK-*#2=FguS8bkN1svJlfvHH1*@8paa zm$vUZ6PG?8fkq#Lzz8~QtI=xhH2OQOhGD>$BycPo&+ITtBEH66FnBm{Ond}YZMMKy ze)1}al3+15ouO+i`5TCu{C#JKc=RYLi!uA_a9Rv8U#q~DQ^keK4xroW)JEH&<-0)%zcE7L0>#T#`A(ZGD+SA z2DT5QO^R9Z1hj`q;BmEcFn0m_eQwiDW?0xVu1;=F z&kxT}Zp`bWi>njs=I5)E>z^$QFoQKndo!dQI|<-Cl#5fkI_>(4pj;lFUtHdt9^nZu z^XTIJo73|X^Yr}s=J5RJC z{4Wlgew&8^IWKv+A&Z>QogwxgY#k@qlOm>^Z*iJU1lV!7CL)H!_SD?7wY|+tZ{wP8 
z)m0-cYB&d5%Z|oPQHu&4e|27D1uFX1Z|(44>+R{;iSi!J?gv|VZWBF#a@(doT0RmO z01qkHsvEY$=O&5?7sEMJ7ocy@B0!-KA`X*RGPfRtOosh!s+K^y6rEIp7uK9;0Vg$m zA`TsF{cB+!4h`IA6reY=9cCog^od{&BE!+Y8L3fki{dGe!vK*({)1;=V}|PU+?gPY z`In)KhK?Xy?EGl{Wj6kol{2e$orm6(bd;znG@d}msfp~{B&07JtzoKtJ{rVF%sGnt;f*FTKG)M?v8?lTNRX*|a^klkKUDAzsYb!3Y>dTtxeZ+0763z3aQv z`IJ*S+HcZVkn!fh_%SvY2I6lv#XwLC6Z{?|meE3Ph+xK>m`BplmEmRo@~4wO8^0W$ zeK`3SXw40ylI{HQQy%{kYIAk+{t!6s$(7n`8H+`o7o-a}wiUp-IJevpZV4ka$XCp% z6HZv{UdDOfxwv73G-KW{$hy;K>n!b+^6%Ivm9t(FXZ?|n$)tZU(D!#122oW2 zs~hcF9i&Th?)vd=t(F?dxWfXv^C>qwX2?jGsb=CDn*4vWMHR7tc- z>$)|%#VstTwvby7vIW&|CI%i=w)?iXa6Xmkht#KiwpRaiqevIibJz{*^_ zBpy)98}?BSzdSvw3o1)K{U}C>o2U%CwY|AK8LWoEkI7dpHJB=PrY@q34z}LByV}Z` z$O-*W<3fim{A1#Os~$pwJq?D%Wj8Z1{Lqms3>Co&u7a(<{e9o~+uw24{G5|V^ zg$Z9v7NR_?Sm|vU?ZEqwxw;z7n1j-Vtw?XQjg-!8tuvB`242`0CQ64@6+&%|SZQRp zSa$d;34&F{Ho#;2Mr3&(Pukq=HbP1R92KD9UDwD3O&AFzD%^m@f`JkB>r1BI#mtNjJyqxk6bo zv-v8DKk%!5(MQA#1NhnLpn)y1Xt_6jD!`t|hq=4XY1Oc))IU8CmDl>J1h z%1;+qGjw}0V8FxVutX8U<#jC+=p#-htkLYz#fNjXRY?jJx|vYN<@1wMNT70-b5^(R zTfGiuA2Fw&Z?`CZW~p|QFG~!qx;343Zu#3;M-d}3;xqLFv5ZRHZe>wTT60bO!J82^ zNF2JgT@Qt$x)H&abS9+x1qK1iw6c`tZOHEU2WzyFPstUitqUTWd7xw5$5r)v4P#4E zb!`o1kb}OA{Rpy$lbF+AGR@aB@G4~up^;;S)|k;8Re(j&Tu3*(edcI=Q_-fSgj23BAC+&oETVnKqqcQTz!?9W7BwldGfBf zSdY+Xvs!I|B$Sm-6S@~47 z(_hE`e5LrGw!Z)CtHuB9X5)YM_8R7Hv)}DD_cTG!@jpuOF+x~@xksJ8?M$2qg&OzzjFZVoGl7fUJmFxL*YKMz`<4}}0?Agq7 z9dVX8FouCh+azH)BA46G!IW*!;LE--iQ`$czq5n3lkJer0(01j92Y}T%pe@^jF096 zcD;yVHZ8}<%Hh(WRQ_d~O>NiPHklsPDOGYC?HxljYEGy-sNDF#s`U3>%x>4PoVG67>^frBL!>< zNwhj~2QE5Ms=W89E#scN)fAP5&AE6m{g#D1iJEQj#FZj-kCSAw#KEJ{aeo!$tg8kws1x~92BINwVEX81eik)Tmvk=oT@_F{9d31{Sh_isSw0Do{tx%^&$6O_5=17BiM@&w`uVOy!|Y0 zFDsoz6*2pAd-2?Qk@yP(G~&=5%n_$;f4CEIkzw0%!%U?L`5WIJsxeJf`xz6i*f5pc5O-<%9Au9<$KB1=@VecTB{GgonH%9Bnc- zNGIjN{;OZo9u3a#ZF21yN01d+4+MEx6IEzt)z zqDb{{%D4X_v+i;Wd-k1VlekLB;o7;6o(PPZg6{KEw}&C@b|};AE=5e*_$6cGh(}J4 z{Ms&srC=8aIhjvwe;b2=sH^B7QR!;RJ9svdD9#Mnzh%CQ(?TD-8C#A`esF39~( z>$O$bj0vWM)%Z z`VeJ03VaS2Gk(<*E1P}5Ho1|3y)l3XRMew 
zMxoFk!~8jzg9x)m-zhVVO`?ZL$(rr9q{ijU`{9W^mMes#5leKGED=e(*s|^LNOMN~ zua4o^^sfhG27gr4_)gfz_$^s{`YT;``ZEq~KbpER|0^@!FkX16Qux%l9r!$o87quN z^cz9X+%%P;q_140q2pAPh*5Y(6}ZUhp|3F8qq$E(X|@N<7$=(_&Vdokf(Vyoq(}q# zwNfDRRa;j8Xqm<}^j)qf7wceW!_?$?i^IwVxrBit`+y2mH!=XVIPLa^+jlNz-VTF> z?Zu02Ja1A5mf~--d&iqcfVAo2Mf1TnmuZOhc!CRYbtWTZ)DuB$2y-E~LQ3jy8E7pj zdvX1>gtl#xc9TV<*nS{b2G`NYgfO~KT9WE-J= z1ELX6;;%6IRZ-ljXUN317)DBxGrJ|*BH!55QYcTVWlLtKme)zqC&6Xcf1s68*N`H&k<8)E0-u6 z>r(8tnPrZ0HWm4C#kAqp6v}qmA(8hMRub_|Rdo=pi9Oosj>}1hrg5q;K2?`5Dn<2j z)m_w}?P?E)E*A!%uNX(hQZX3md&!Vh&YLc(7XTIH0r!}BSFKAuB>^$BH&khn4o(fZ zn22OGg_~Le6oWz0P+-TgjmHpG%TOI{Ck$A+%fpxv=`))|9TX}zcI0|oVn=+&pCn4S zLMOl(reO*KQkYm29bJFU1xJPQ7G>e2kL#jh$Asp9KG{h|=VS_s<eijv?35s2x8N)PbG3o^PcY=3yLB&;B2ITx9o7yN<@r}vZHP(}V^VFVf#95dk z@>0p1X%se** z6SfmgSc!&PDKU7Jc8}*I#T}FrK*M$3I_1G?~yWDW{!X0$FY7X}lg@igwZX&6 ztwumtn)Fn{3Op<}V;XOTb{(N~D+}`JOV#y%{^Nfa>;Cf}|A!OPL^ch`*xklkBdvw!~Me?=Mol8&y}CsiIAd5A(pS)q*`T!k_)dA~P~Kujt=WEmuVY7-@r0{^(4_Nky;bW!$^T!D!elGL_+HsnK5CaMJk^Ca?gekk4V>!A|Wg{{i+KpnsoxUjZQ~smeeq z8vzlNj5wf5Xwo2+>L$K))Rd_z>k6MeP(CL%ZN!$8 z%vw%Rt5o~|>x?k#a}#|Ugp?&K!7n9}0EOmnWvFz}oD91T;j&Vtu3)9Q=9Hs%K8P@< zuOdS#lO_x#!eFczd;y>vrmhc}v?SG}i~f`Hn29x!yKf>_RYdz=`D!l8@moqU+6 z!GFfBrq~-BujRw;fRLn!)8sJ(ejLnWMWK>DkIyb;Ljyo`{!qby)YdI(E0R=V1b~Tl zCIHdqA|M{Kj46DA%qN&K21wxCvqNJx4^e!r!yqGRsT8xS%BU<1VUaM6lQR!(XThBr zu0O(E%%+0QqE&-#T%Fs*ekn0;VjoBXbxPt)Y_3~n-O#-?(3;p=sd~B~O!XCi6AnAC zY!YQih4jp!v{)Q#;%yZMcF=8Cl`p*x&wN=LmNakKzI1Fs(ihK;?QQ(|mT zFe%@Z$Pxe>j?`PCkiubB$$H76yk#847ih{4Q00c94p4Np6H^bk(xuN|%U}9o#u{5>L4V6w~J{c6s()BpokgkKr%SSMnHq z-ZPEERN1wG2e33%W2W%0lnFi~+m{CBQEL+mX{=9m8M?8q@cxg zqJaR+Lp5@0z!!uFY9vk^qup(6oPh8)Oyf6}z|bs6ZWG66+DEK=3LI%6D9_{8Fk@G%ihp7%)BH zKx;AtD(y5%@;>7t7RF%Y8cdxjcWe;3F{^P$hmAR=>?lqh-WM()k6C!8D@17(Jl(Jvyls1WMAHqNw6nznJ$XyR=_ zR-X1GV|Nz#uY@#ClyC+R!OI0*1O- zYQLd@qS-c$_n`5BT50=CH6Ek_rceoHA^Qs`37o`B%DBgs_bhrMda;OcqZQ-bWJ-Kl zDn|}je8&7$2#Mdbh|pXv!X)4ll_RrTBbA1LY=*uETHd6)QthAw@Zmf;Xkd^%W{UVg 
zrvVBvLa8Je^B&ReQiiuA;3;#ANtmc9hzE&RG)W|Je>}%CRgQG)C33jW(m=H2Tb@f| zuTdeQm#$)^=0r$Zr|Sh%3zSI>&C%M?q^5Ds?sDB>m0TQjGTCCRGToaEnBW49PCS_Y zd^#w_wnNV=otnb%0jUz}TwFfuO4ef(nK5NEQ94}BISfed3d(Fm@s`6|G(05{(>Dmqg<${6S6)nZvljLUAXi zl%Q;yco2We4!OT`k)BbJ%c_R`%Fq)8`WIzDa_WvivLy-pKQZPeRUit7{9M&lgB~Uu zb{dKvBy>wDvArt++@R10a%O}r-;e!68*B+Fw3Kf@_c_!Q&qN`^z~&DC4g zvp3Ac1F$?=c2pX7Zs+lcMUUNR3RJn}{w*h=u7i&g%91Q5MwJw|c`Qb5axhUGcQKbw z{<`>ZD53(^DP9_XKn@13T?@rS?)ol2fr-Zk9{{>-ygUD3yyK^vGRJ>0ks^J&JgXVE zrqL>^j5h(K2V#5jg<{+V< zo+t$_Jv&D6WgkHJ^ZUwCa>RQdUFdimi%c75HgGWHX#4P91HWct)N6>~mfaH5zNcGZlBC7|PHo_wBtIGQrV;TH zb229%X21(G*C0Eyxp?s_C8@nuu2kCJ*|bwT&31pcv)5}hcIdt$YlIeRYt(GCntgNj z*{kq*p%oMfAjIOz*lRRJq$gXPx%g&FS2C&fLPt$w+{JQ;X&0!4YqISc+cAthoWg%7 z*J*ij6NJ75ee@#c7ZK*zfHkDkvb4{UlB5KH)-|OVYU&T%Qh28_MTVfBa?FX=rFI|YanmQipiXXL3VZ_US^3BzeyYk$B162ani5Ha)` zA8|yE@SVIjFKZeVZ4jdKfw%dM8Rl$ZL}-u%91-We4#vL&1*f7aINn)e8HT!H@b0T2 z?w4|uH~HR0NY~2Rok|K$H|;6l=nA89Q2)~y|L)F=pUOX*&}no0d)nNkuqI?$gt_2< z|2!W6-`xgXcFZmDKsiv4h2Lnc^*BZXLa3dC(o6dG@00mp8?XBKY3JWDqL9eOPapUH z9jg5FiFfKO7<2ns(t|2;2|`oZ$>Um<9C=uWS3eK#67DjaM-zFucoM!SbWJYD!rvhL zyJKJGR6vb}Xjwqx$Tmap%E!QmkWgSByMz|G5&1)%IR;%7`i$ljs)1N40_!T3AB`V> zETq->@kgWj@#5;}^q8YPV0{oe?vT(P6CiD4mZ$CAcE7vb>vcLcyrAOa>GjR&_R;wj z`VK-{WcV@Og~5E5fbkhf{-%*Y!3Ly%|$U}cf<#eNb2y-^;Ng^5I z72I4Z_?QY{pQ`Ea8o#k?M42RjCwZVw^4S^SY*I7|<|Z=UVxC}%YoMMa(M;GYCB)?; zl}-w$B;)Sx&Kx`NZ95`)ljhOb$`;?E^Zgafe7zmK`Qwkv!U;t+J_Im%TGO>5HbFkAp4fshOWuNN$2IMFX|NRdQWEMj&D zQrwAu>jLxPkp_UUBl{M5mYn-#HtgoaQV=rocZDvUIyGhiER#$3JXVCCmF{&!CyCUV z;>>gsBT4M!qdA?qmu?%_10q~2C+>?p4j2!M>;3WVhx6mp^LMn?>2Ja*o&Q(of7JOO zbzVfB|8cGSkJlF;u8vNw&G($BO@q2nJ^^MB%+u$t=Q@2^xs#55PUR5f&^!njq%Pyd=Y>>0GvKfA_Q#{Ewwy;2s z`W&rWk8D*I8LQT=69+TP!&v!;E9^*n#Tz?f+gC^D=i5geGDd7v=h(%=pu?l?=2h{M z=#45d5LE4R7Oz{2j2_4Grt_TySo)F)%#=|dz_Fse`?biM7@C$)DL6jNkgM@kskdXGc z&*G;*Tou^Ub?Zx)vIN%SM>uto;g zFrT5P&oKT+(B^f)K0|LeCKek2eV)ErV>g&T3+$(z-Y;Z0E1-vL45anm>WoL$a2Q!5 zAZ|~&jvW5t@$mn|xbs;Eweru6MdBQ=&*Ik?60+q0AG0Ucf%c@md!o^PA&OqwelUw& 
zCxb$?+VacB3tb9wCSDQ}@+E(Yp60;9)NZus>%)A8o^CLHH(o@JJ!4S`$y*o5vGlPw z$!&~bae*K0C{~ikf+!XE{lK1&H<-C4u)laSE`eLZIyUX`*)ONpw+*wqzNGdXus5EN zbwTF|%Z4p9pKV&2&owE{=bI8dYqI0mu!)Dv`l6n$g4F&}NHEB5OBv+pdH1t5_N^gi z!rusXx#;J3k!6ql03*+z033D8a@yNY_e-I*pBmcOe*z%U!gmH60-6p%W&Z*#fuNMI zY%x6oX8n-6W zE|T}l#nHnC&2_u97Pt+h5r<;4H##PC{A4DYGO+kFynd2`qk&SV+r*7JexJSFJG^G(R&`6guXB@^=3x$AsRc>HEI zihSJ%R3+CRVIDt+S+?U?F^ksr)^F4ZG1D&sUlI-QbbT)c{^fmMVz_y_elKm{+4}C| zDFx5{oeeD#3|)WxT%&k0_(gZwyQ5#8WRi-IpKfi-U_VEXpKF@Y(f4e#@wD6MnMhU% zSPYA^2Ejdxo^9Trbn6r?_D?tKWw5_|(u+`^Y%)qBL7r|lN`Zg*bYy}>p0KyGC*CVX zQ_<6PwhZ=j^tc?+^!X2c{zISt(9bu0{-Z45eZ%t~_%w6pKU!E^pZ|D)M{o7l{b>HL z`M>7>n*TqN|K|^`J>vW){L$^^`G2$7)%^b@o{#9U4m@V@q-D{kC5fyKmZWbfeY;=! zwxY?3=Kq@iYyPjF?}qL z(T{%gqaXd~M?d<}kAC!{AN}Y@Kl;&+e)OXs{pd$O`q7VmzWL|>0*LT|F90wH0Bj9t AEC2ui literal 0 HcmV?d00001 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e39a78a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +###### Requirements without Version Specifiers ###### +requests +setuptools +matplotlib +opencv-python +opencv-python-headless + +###### Requirements with Version Specifiers ###### + diff --git a/runexample.txt b/runexample.txt new file mode 100644 index 0000000..f421712 --- /dev/null +++ b/runexample.txt @@ -0,0 +1,6 @@ +python3 + +Token +xxxx-xxxx-xxxx-xxxx-xxxx + +python3 simpleimageclassifier.py --config-file ./configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input canegatto.jpg --output canegatto_out.jpg --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..16a9137 --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# @author: Giancarlo Panichi +# +# Created on 2022/07/20 +# +# find_packages +import setuptools + +with open("README.md", "r") as freadme: + l_description = freadme.read() + +with 
open("LICENSE.md", "r") as flicense: + license_description = flicense.read() + + +setuptools.setup( + name="simpleimageclassifier", + version="1.0.0", + author="Giancarlo Panichi", + author_email="giancarlo.panichi@isti.cnr.it", + description="A simple application to do image classification.", + long_description=l_description, + long_description_content_type="text/markdown", + license=license_description, + url="https://code-repo.d4science.org/gCubeSystem/simpleimageclassifier", + package_dir={"": "src"}, + packages=setuptools.find_namespace_packages(where="src"), + package_data={"": ["*"]}, + include_package_data=True, + entry_points={ + "console_scripts": ["simpleimageclassifier=simpleimageclassifier.simpleimageclassifier:simpleimageclassifier"] + }, + classifiers=[ + "Programming Language :: Python :: 3", + "License :: European Union Public Licence :: 1.1", + "Operating System :: OS Independent", + ], + platforms=["Linux"], + python_requires='>=3.8', +) \ No newline at end of file diff --git a/src/canegatto_out.jpg b/src/canegatto_out.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2941a410206b912c446cd6bed495eedb3d6b680a GIT binary patch literal 37831 zcmd421yEeu)-Jkm36cbN0t5{Zq>&h4&QHJ8mb$69mF?;GD33pNW|1)jc_l#v7w5D)-U_#c2R0O9~T zDk>T(3OX7Z8U_YBCKf(6){`e#B+v11@hM2DC@DzE$*Jks7^rDp(UOxh@-n@8&B4vh zO~t@3#K$Sf#>LI~MDK%~Jpo0SO5a83_di85w@I5BxZQ{0!y! 
z3l1?+x{ur2qM8qVdFXe|yjN9I*U;3`)-f?P`)F=qY32OM#nsK-!!zJZ;MbsU!I0>f*tqzF#H8e`?3~=Z z{DQ)w%Bt#`+PeCN#-E*C-95d1{R86@lT*_(vvc!n>l>R}+dI2^`=@8;7nfJpH@A0x z*o6Qf{mZPsEc<`h^$czoA~G@(GTI+@At1WJ3(_-WlouSR&&8C{j2!W3IQ`J^-$rFt zbYReOshkiPJB?uy(s8f7JpIG8KP~&;GtB@0$g;l-``fO001F8L{_>EX0V2Tl9b=X+ z=KpUc<01n$t=TJg@7#T(>#nx&D!#Thej54f+!Un@?x{vbUHrU4nEA)9ISlRA zQS(>INH0`tkOMpbMt(aPZUo#f$rwgcB_AuQk`&TiUdL+@Y)Wbc>z8~!LLbFnV)`)t zz~oX~`n_~`VL7*mtUyC8jE9Q@bB=@$iOvC`+u-D}Jeb=(-$IZCw3o&koSQbV`aQHs zTe3&lpX>No*3xJ_Mcjk<#B8WmQw4)`Ko?_3dhmO$$09m%M9Z)9WT3_)Z*N}Lnsm4| z-DZf@CapgLWgLFCRCP;jTl8|zw5^nk%sQT~NBqg?cAUwU z?;=51MF&u)_)gl}!i8m_DFVw-pLHL7r#3+$VZcn!7GOlSo#*gJ?^bIY7t}7a;?Q4o zH&kMIo+V{h`@BS!^gBItRa+tNu z$&|2CL7u%oADQQ%6qX+?Qig^)T8;Vf_{tfo6v=e-7Z>ZkvfCMWjPKy>jYp9eeh<$; z2y6O=6cx^4Fm*CLe>B=oYD#n>k20PV8Z{C~j>nQFwiD@m^9%L(N+{vsT;zHz`<`+T z>XClGJGIY<81a9yHwW5RNp&^^16Cx1#fDEJd4tFw?I4#}pWcm#cG zR*P^jo`}i{b?o0cEf!`w&9>5M=}Gzk19o{#uE&}Bq80qyuXj$$o+}W?A6Za@3>N4M zYYjLbD(FkT`_RQg+0cW#&B8U@PEBf_?5RX3MAUKjpsCYoNt(#oBEB%(L>7C2woQ6j~IfH!&a^q9B_1@1)+>4S3c}78MvLEDH zet~)>ZEa4m(z$<-v}*EgUTiQV8;rmBJ$I?j_o@7)eMn=ME9&V3W!4MgxL7OMZDw*K zi0vX8ZM48d$~v@=GA*If-C*j4d-Eo4Nmp6_=nKpvAzUk=9Py~M3JDLAo53Qsco6Lx z!j+!g(qk5{?NXiVQcCSk=mtq{)ye-!clx1GyesqI#M{8&7< z%8*tn?s&7thTAu9U7pIbU_!1~gG@!c$3`fAZfG`vD-1wYbEq->ov;p3(g_ouvgwSW z_wHXzjgJfmBaDAl4Y!uFY~!F8mAR(cP$YGLGU02mb9RC`irS4uVtZasvVFmtzPB8V zxEv6{EV>hq@uL=Qk2uZzsAa$XKrDz#dkdybI3q6ZYXEZYEPfMzWlMdUL&<&p~mz>@gww$IkIN*0fQzfyx(>{CCi6o zt#eJEIeM3Oh|9i1scd0DVmee723)B>JHdT?*LH>m1I$z^@HGuPVt3Vja3F^|&x+)x z-^YjLq3An^@Y2z&dH%lrL4=8fvw5D#eN%q?idNrR!SBFSK3rgjr*@@%lPl z-NI*|C(FM^RipwXz^y0}pCDvcWbG|@auEK+L>?>4EnomL45+MW!j^(Qr{evc@N9JJ zF~sgMl#JvYhbz3kEKT1o+|Kru=Pu@ z{v6~iV`?U#+>(8JA2049r*TT3AR}$ZvRe)VO6JCLm4h{1aMEw1g2vZXOUKZ6WByaG z#h23{&m<-I&c}beLDzRw%jrATe2L7e;vJKW@e%y3q;(}*>V?VnLhIJ33m@|5?6Y#G z6N$pAlh5BVh-s+7Z_czzhJP#1!+=rEC-BLa>y?}H#MTnK@gfZI^V=4^nl|&7V(=ws9VSZF6Jmndg zO7oBHwA7i%pAnm5dfup5Ha7o~B9V^$p8i%AtOE>=3IDzo3bZK-Yt~>3#|%lomax@L 
z&8KN~!-JlyZr#$Zbi#n9&Yj!Hfg1G_Y%5uG7VRoValh$GZfbjF&FUqI-$%wtICC^2 zEa7e9`^*89913AisH_C1*UyCPe?3Zyi@G~4zh=nbphO)YU8X6#i$ zgaN|VoaepK&q8F?6|ZvnA5k22Z}Jj`jWo%0@;i^pdyioNd-HrePAp;T`GfHB`5UGO z!y?)Ed&Rh@u!;htOXMkuX_G_*2TG?m7%$C9?nt{MebHJt-d~n!8yMoXmF3IXQU&Tx zh=cDXXZY^1L?U27QIVBrNVAt48?JWl1b3znp1*m+nAM2& zAE$kA+KH6{z9lw?0d5a0kee_deqXYe3E%wE>JYvRZyKdPuAG#4tz*J~?4EniPMRHx zvwW%%U%}-L$^Jw)ohqy*_o8`S4BANCqp1MOo?-88hR5yv!8570Ab68Oqvxz1xzmLT zs4|6hr|Nk&=~hF(%0p|kLw(!c`zN>&XGjFv_T(;0XoQ0B_N$`U_%azEXW~0 zV#6pdxK9<%FTgP1BWP@ff9?1d)5;J&(0}(d?85*;Dv`eB^pl;lhmmjyV_5)@1!4S8 z7bN->s3e?hEy2l7P`3un{o7QN?Qb3-4-vca^7indi3J0WU_hXSUKc}MH0{`ADc~*< zi;c8+`dj49xSBRC3Hf^b_SkRsyqd^|(~8jhjr={xH+||`(em%zmci+RXF?BLFu)xC z)?dVP2GQoK#Xc-LYvz9EI94Oy3TL5}C+?VG{oh(v1ZA!*Kiy^?{xB3+IdIvIE|xA!cC08FHK&skSb_=+*TqETL;(B67p z3&%atl`}baa0Pq%GSIzVoxnfjf7mBZEOu6{2SPv5MD{+3bVp2$*osf;!_1~jAA%+o z4U>d!Iw!YL1{YetehmLfZyx0^{s|p}gzk~AcRCP_C!@|2?T!DA7Y%OU=a0GTJ83rG zA-eP}i<<1K$EZ5)8oyBJW%G!m#UEJ;2repQSZH^0j*hysAk1F~{tchP|9yyzYTo>~ zg-Tw)fFO~C?N6VSB|Z_O;46#%rwa)W3^;o|3O&BjC@d-dYbnqq$-qJC^)&IxWj9hH zN!Z?~-<5Y5M|ifZR_4)hcxcd3%xFFMx|6z&I=mS&GUH8Q>0@UwfDc)9t|n*sOcCaO zlPXZiKSEaQJr$vftN-@dms)T@!{*V#0!^|zK^0FJol`Rd$3PG^*@=Xi-aF&SY^-O~ z%@-51N0z017$DXX7VZv`pAo8Zt12eRnS}I;FKX>^a2+f&5L@G(zO_=5^Opyts!g|3 z3rO0B`FdmgZl?wXM78iliCTL~jfs%>O88CAR!|#jFn?@NJ8C7w8zojoS|G6tPLA=R zW?2lRK2s2@@e_mqKhFG+n9v;g(dIMH%{Xq2Mn>OPP%bTKnNr5eJdf|&*poI@2E_U{ zm2ebIP`A;k*~J1!Ge z@isO)!+=2+H7uXmP=rV8)^bI*+!oE^26O^D- z`|r_I1DNiwoIaXCuP@>I-_$*TwZScq64shR?CbFUlTAU%g~3UK?&Y$YJsHJ6eWN!D zNY;WT1n)JI_S+w`8NT)oUzR&2)1?tXB(TDBrD^@K-Q(+!bPfBF&c>p@+(mlM;F!oW zNoF17@3*N|A@FQ43MVId(MP6I}OYt*&|v^f}5j1d*L9y%5l3DC~YsF6}J(oKK)iIv;yj`aTKR zQ(FCsCa8q`h<=eJ9>`n1TEpJJV<_KrpGvy80{~yCfs}W!$3qujGB}-Pb=hvx#phDQWaBO&@+j7)yuV z{-Fc?`@-=b$Ck5E!UtpUlbft-3DT#H_Z*7LkHsGk`${K_dY5~35O)g43|_(jYy^hK zCrB^=Nf8E+6+eA&ZcdwC-xk3{{W{AO*SZpx8IDHA=^<1=Flv_7Y_PJ?Jv`OrAinlt zTzWyxr;S8Jl|m7GTCOy~Ha>)2fQIxF|1+>P6#Ib#(p=wg{uF<`|`$?&3WMjB-KB 
z=bZN#`VxjFCIVW_6#28O<4fKjreBq`&N!|fM*8NqtsCdYxGgPa^0IlEVj?9`)<}2| z?DXhs{9I-_@}{cfmsan|AoiCO{qQu7&N(1JrM4XEk5Eim~<724b@f{o>sTPR97r1cwhDk5o8t)^d2RhpX12VkeGK17r z_sS#E${%_H8|5_pFhSU3g&PbIy@o415r-jdcW7+~P<%b168Yr+u|>vuFRc`A4c@<- z5vhgCLc8!)q5MYK;i`h_A3$xb$%Iqr1S-rSRM0A4+F6*dR`}~?Awf0Y9(AE2C2h;l zteS(ENf+(qqp9TK*kT79k4-O)gun0bGd_;~QH0hrE@KbwRiw~2NA$#pq1L&iMruh5 ze*u3h7k~4YQ5)W-XZm`b-0?jWyCwovqnRZYC(haV^5D@E?hfM@q(zUP;9S3JP5z~j zpNa%8`Nw9H`KO2qD0+3G`Ro(F)Gd1X3#U?Of8?NsNRdgTIv;CqzwE~nA^mR3I+<(< zBm*S_(m%t4YGj|PI34sAk|2xn7G@MIHTsf2mBx|eQm6Zt$zS~bW<5K$aHAA>SBAN& zs?I5WENY;@`~d-JI`QcSF~z=R?mg(sxdSGfm{5fM41VHmhftB2-QAb?TIHyelrJji z(5Ft|TukyarK3YA*q7glfo(pmX-$A|;(^!fNoowS^G)dVk`%HrvC7L`2>xfpobp_$ zQ8-e|{YlvWubQ9bsHfn0@S4D>`IQ2=mUrIcv*FzeLqFL=>+Q(UM(StKF=+Gi%Nu8aaxN9j2TUd@4Gq67Pe+o_QdGUOIHkuskk*g zb;u?bbP=468#Gw3YGtSzqe~MArm0P4EH&KzDDWh1A@p(M*C*k)U*Ml#U_fC444?zw zn^B!)itNMH@O~=joE*O*m6jcwUDp%xXuZDsw}31c-UMYsS+8~HY znn5B%&3{q$6C|2Ba(oDNg3C1OzQ))curun|ZUP56YVSgM zK1L&$aZhWa8*vg21ZU4aM1M9)Hwg`|={SG^xY3=e@pCQ_e3A=d_dJVtr_B@10vX?N zm95KbZkw{1>IN6W#YUWbn0U;X3vGNviC+&yO4YDg4{$3I8CX`*2!6G|*yi2bn7sn+ z6S_GX>PWaC*@a8=3GFIuO%&zlt8~K$;IF^+jQ~;N6RMj6hCf|5hAv7DSrXFuPSY41 zQt>g7u4!IK&2-J=QATZSMo&M5=hZ6ZREJ>stK1OBgXH)TowyY53f}w~`K~@=jJ$2QMxwnX9Z0#} z6H3~DlyFJ|Ra)OmuZB`_`WZkm0x z@yFOfEe52o%6fYmrDiXAl8BzkC+N9eQteDP&~JXw9Da|J`ki;;&d~3|$|^@~#&8cx zi|=bCKf9`?hv`lYL6r-a8qXOe0U?GutMr8ShFfCUj7WE^s%ogfMYg2UaACU|{WVj; zPZ_VIbweT6Po-L3lNx#g_^B6#T(cK>rhemboF3fh9wgL|gJ#^kCt*4Q397YoPrRNl ziNra*$5|DaDRO5XGy3(g`q?rS3OGXcJEW2u&*Y)$g`*(}tVG-6k6`g!{OS-{gT0Nv z===Xo#7%2@<&1L?-0+BEGYzVH2(6QQmA?9SR%qlyLG1G66-BdqqknbsG=m^A_` zKuu{Ttf5rO#}5aIHh?Di#0N||hPVrEJ)SAb^E|67&l26z&D-|Du3nsUX*wZxo?$6jdAB9g^7c!88Q)Mr5tLi-bMEY-Kib(3RN1-nLgaYv%^ z!W22#C%A|E84H?kD$+SDEZ0L3&lpEr0BO$|fpzfn-fZ1OqYpFTd=&3YCSib>WYfAu z^}VL9xnzUgQ=41lvJaaw2-pbTnEljoBz9rMfI39rV=XRY*WFS02KLUz=NpkWk9 z#2ju+kk)!MgroEly zLjcIt zj%zg2@)uzq`ig=?jbrswy2$m6Sc%?FqQH<5y{-yCqrlylcrj^DGIB8Ieg_UT?`A`l 
z-f4Mv_SG2QmuosCa0T2o?(_C{Ey`y88Jvwm*6+4{c_cXfxEK+MGY$EXKw@$RNgS&5 z73<4db6cc~aeB@FYDq>gxu>meRt$N)W)NtP>Md1nu?M{`Iu(r%-Rul8GhFF=8aU=E*CFW2kG64)^cFcFj?ZvH(#(+UDROWkGQmKt&K^)0Y`6 zqBHqx(-NfhgM;cjp~!KCGA=~C@FaV#+r*j*9s)LjJG_?!m2Wh7D&Ut~i5$ZKW!c`8vA)$%-GDx9SRfsL~kyFNDaJ|sYp0+V9iAr?*^cxGAo)SH` zhV?thx9vV8WF)n3Dx4zBlT#m<$Q3-}NMpZjp(*;o=xA8AWzN~B(KRP=*X_7HvXKZ7hAXmUAd%ZJDPxJ?Us z=~Ej%<}Y&E&!>MM=!%XfC=UVQ#S#TdclQd=X8Cb=7U<`72SvJw( z8S1dFh@6PH_153(^{xnc@?MA9mCMwXCph79+bAS|RARxU{1yfnNM(*mGg&VSD*e|y zK^lQF-0Wzbe42l!Bi~YT=(YV-d&8VxKk-RT&~?O~frtGmS#PpI-WYGtTKN9&BWbhB zM6NT%fhIrD!h=$4-Nb|Dn&r{Qr$_`Bj*G9?Nhm>T;Er$nhttj3QWGi*T}+e}Mf2~& z$VZ=sC~Jt*Pt+JlFnloa^+21!mtn~_o=ldft)GCtd=PJ&$sB8|_;l|z=Yip9$!SYG zBE1yJcK{h`yP4R~XWLvb+d~{bD5CP_54-w%YM~> zDlE~>r@YUiUo*3SgVXpL#&sUVGG5HRDzw>SjDHt#Tp2do-mx#Xx=OCel(%F#AK-4N z>Z39;&H9p1pl>IUE@W$H9PyinPsdlDowOQpxl<%Xp#w!XQcH+gQt8Mjr|2QHkc`aN z#CJh~BihR4H*I~zP3Z=Pix26@_JlOmJm)*X`t_KhcW#&%gR9T^Dcq^A%hm1`8sK4> z3aI#lbtDrR z4zE;6U3X0$^Y!4F32^v&0p<4jyAG<@k$?e0s?a-pk1_D58a&S>(69!&PQ@D)arNC9 z>SOW{bTjr>ZMye6(Zi+U{jN%|iAkUf!!J^y^6RJ09>gn_Ga@_FB7NX6s=sQs6WUY4 z=LnbFEtSxB^$7{-r}BS|5YnlXd}{l&$D-xrZsxEkgjNkCXz(J z_I>@rP1fhV2)(guF#hMm(V4mpokozFM*~s5%I!<*Iil8yGVo{#I1&bs3jbZ5S0NFj zV6skGVBeDThDtL0)y=;ZviA3YySXr)Rc@D=bcz6(wSB>i9DjFJN$k*_6!wTxZeT4t_ zcrkEtEhzEBO+@ z8Q_a;>hWWi{&~MYA8LuXMWxgr!hr@~I0#GFO7zJl%EGZ+Qz*vfLsQkB+_$hH#ZtM0 zx`O`8f+T-(+hOF&K38VZ_2PJD3_BS?m1Ei&kbX}82YJM&hfxiyka&h!2i)nRV$*2d z@Sk~MG6Ge8qJ)dZc|_D{mukVpl^56fC4uR7oY^gMG@Rs*QyExuzmeLbB%gRMS#?a% z)YYLV4r`d6Q>!TR+7@MP%g|ANm=}PczH)t_81f*W&P#Y*8_BuB@r)X+bsR_JeOd6? 
zQIAY9Vf^pm^HtLi$s<(wA%!Qj^U8HSrTbjkq`eQ|Ss2jvD4{!cM0y}u_5B;$NukZB z5&oj&zYf{|+DmC>-?f?H-Qd5nH~^5i28yFS5k|mb`Y%1;NiWCK_gWnJ96Dh+;X8$o zqEvomuYP8Q5m8o7JivvtvG&g7a$Z~T;3FQN*EvhosdU4PFVD)(@)iYN)#`d((p z^^+&Z@qfVP(8vw`>^(spoBGu1uwsg#nBRm^3tXEYCEJLM&-zE;j}&zT@l z`JoqQWgHQQ!z%dc?{n=o!f)SGXtZ*<rQeD)za6jAv8n}FX4#^xr`=7p3G{yzQtzN`?RC2teyHw!@PaS1-jHeu zUexNKhLc@TUN`uMjtNvK3%=r_p#GeI`uthb?!}M$Jg(pCs!2$gett;wc)}zRSNX&A zo8;=IAeaR=lYDZcl#TH!`iRH`vF+h;8N3S5tO)}heu%46Y;{6KNE+POi|LkusP^jU zo$0=AdZDW;J^MzttK`kotkiLok=MfhUq3L@$ZnQ0&|e!B1cs0nazolPNzek4tBqd) z=Ih1%&)HuTkSw`NuFIz5ikjKNbN)u1@K?=GO6Rv8aB0D2i$5J}S^V*@I}qBE%Cefh zRcb6fQ(Xx1enJ>a(EmF@nuE1%(*llJ&Ucs#C9?Lnq-@|tsDJH&=Y9D*y*s#{t@ZRz z{{06B{~wDOJd2V`0w4&32d-&Zd+*bKX$+g_g8K#?p*t7&N#EsX3acdWRk{DXVQ%rr z)x6N=UfAA~%AFYk*D7x@7(fHfgaLX~w@Upf)!JKigxwAhCjg1Q2$ZCx6_D0bJN>+u z^;XT9k&cn%@~UK1hy=rG+-Kdr@SPFD=$ghhM>mP`%yd;=x~aBF)WM$UvwE8^eX3u<_UB3toZk7@(2@v_Ghsg&mMiRL6027$VkFUd z2?L}-ltm5&HMT_8T~qS9MXGpQYG=%j-J&g4j(r(X&e0h8gvf|qYVz{S9{6ANzsm6v zEV~OB_RDiRy%sxzGaq=mmVpPtcA2i+m>edDrfP%qZwWQ#H=OE3ih~9D^6DW02e%hO z)t~Td!q%3|T)uPA(_u^+OI!#Ye9#=vOKueQ$@`vItt`PooILaJg&PK3!+?S!DEvA9 zcXZFHrv^^0utgqWz&>-^KQcwMN_QVd3c#oa=>;pq&N!(edvIlD7h5l`OH>3j=iWI8 z?Tv61T&b4Y^4xLbg8{zV4Z6O>k=AoRn|>K`jDOx|>c)+iQJ!g$a}Ag2&-<31X~TG^ zsHVOzVPnCeB|wf@K}-`#jYgA8!H7pO%$Qw+kIo?j(c6Xq3K!MinWVUZ*s$?b%RFD^ z{+Qp)sB~Dz^URCBA9ixp+{Me6Uw|9YbTB0}qfA#9LnGcBcQ*8n^rq`XU2NX(a{AyB zysXA4k0(a-FV72dUY(5=!TpMfBsfke)Q=@~hS>OQywaNsvwLljqG2d1m^fxCB^u17 zbGd1(e;{*P^)Xoib@E_A!V)Tz`y<~6bvKx{P*Rfa- z5euw!rh zhchR_ldZQO_uyD40ZK&V*S7n5b&BjE;}fsDMvV-vtm}KrM9@*KPVU?51&Z-g$k>WAk$p5S_bgN2Dh9Scye)VXyZq)xUvjqK(W z8%a;U)IlV+h|Y2nWF+FdkTx63J0GpQMt?$Z zC1r?AE!k(%dYdBuKqsk+MpW&4Yn=YWR#@)N6CL{3LkQ$c_FPV=K8ND%7; z_tx}Q;-{z~*>)xLN2|MqL?R0kyY0MMoG*PFSYDo(F|NONEU!S;w$fg429oNS&8aTM z9DU1L$mi;2h)miMn*wSMi$`>29t9EIG`T0)RVQ9M2JqoKiUzgT`3!2}@QEiLJvPM> z4hXjvX+N=_+(`-^Qu0N+?}a09NgL6M$K6&Z3)3@%=LA;=cQM-QMNS zM=`GrBB}p4{^g8Tg!#l8WkjYhsh=(;`OD0=h#V7(+-!XGq?KD46z{vTLNXv4?@zY;?oQQx2jen;Mx0&f1M7rhy+A@i>3E6!Ic;ODeokbxuZ> 
z9%9Or5mZ80&kGj##c>+upGrpeY|<@{gU(cmlM%_Qb3S`dXOR0q?5o(T*iW-d|SkC*-X{N4F*(TBQ;q@?s}v3`g%K z$KdP#7@wTHHS{eh_zz)S9sJLvJQE!_)_zU@Q&=?oM;`Gjjf>CbY9aybij(#sFks4Z zB#SkgP^@L7W0q4YxXFq7O+0iZu$_-dQO=&YMr8?gv!qGD7#eXFM{qLY<{R$+W_f0}) z%lq+h5l#{fuUo0QZ@RWyI84=S*_H3;vs*UUgK?UemV*50hl|k#F9m3&^aBd^?+c47*tD{PE)P zvEDd4D}JfouNQd(QV7SN-d^LG-z#&(U)mDY^g0GjfS5Lh&__-m>pi&{{`GSF{GipZ zd?w*u2Vc7!1ykM@kT<#TQ3&V7=46x&GmaM!fzhe5{ zd?TzxAq)TV&?Ip4Oo3&|ERC4x(|C^c3|l+qDXV68^6=cJ6ph5M)YYyT?*b_NsCYz3 zM}1Rn#SMblq={7>X=Us7>Qgn@r>DoH>d3>6HS!nBZ)cBPRwzeZdsaG!o5D4^l8X%;b(=$aPjr;zm4&`!JN~Y>;aU}L{U^5DzG@;K(>o^gmi_x zL2tBN24!?d%ZK5Vx&PHfyb4d@E{ci%-XS^Em=aBAHWt+M3G{w_OYg%*T1{VyqH5`E zDfWfM+sZV7?R?%M zH9jb-+38ri>8{X^O|J*0F9gY_@``kC6IN`&cv z+i+wmJqw75nN>kG+aO^$D@+;OVMsQv<@HXYtVW=k+pwuBRE75EhEGo-zK^%-9_C-H zJmG+RvMNDx63r2avxBe?ghW^kyJ2>vKC&q6X}lg078H#tTL#mdg_mGd&Rtne_GsF{0cd~JWs=BoYZ3c>6b~Ceo&dP9( zrwc_8)zggG5suT)G+v04eYU`z+i;t5R$$)DlkB>aoP33<$#OH5=X_(%`w!o3eFgPChhIyAxwEc=-#ugz|pIO`ZY#* z9viU|Hyul3p2fAw+t&?+3CRGDcUERY9o}2bHmk}vD!hok{QSl&v`lC;iO(A`UOm@- za=}n4dS_fzfwm_~0+>!d;^BW43Y9?P28tRKm_+QwY|p=K^V`%+NuY)7H{%sOCTQMsMQttRBR$p}--4MB+7;t#($#Ola@#$f z$zd~!Gj~$C7w*bL1N09)>JutPp_Pp%YX2Xf zd%jZm3s{CvDSGFl4uel&^3T_rj%BF=lYeYL#}_m&5{a%V!lTZk?Ya|w)Ejqb>N$-2|HYM^N#jkWxU&5kK_gE$VO(mOx*LKqImG+gSNY;@W|__;)5?dsl@|) z@j_X3)m#$q=H${a$zBqU=Bx>ij)q&BMd4vEfLfRRZoO@wTs7in=mGe-K?!44cC;8N zp@}bZXm8A|x{#`B&Dh1Bec9$5vN)Yzw9G0X5ba*heM;5!!_>XH{z(@n@|p0johd05 z;repn##4>_LVe~H0~t;h#ER&*NC$Oo8+vgZ$}$D}qxRU*>anrK_Z5dTP_&^7A@gdf}UqsYmPJH0zM!BsRPJXv>x3zMW_RT%$wPcUfAUQKO{@oDUx$ z=gjKgrHX~)14-3rtsG^~@2?-EbhJHw6Nj@{kx59jBSsG+W+0X$blWO0(J;`@{;DU= zOkkD}j=U(98}jNy@@GkHsY3q}dRbPsyPDkJB)T`=kXOSS@@qRY#x!-ioEW7O)dtJ8 zh{&>H_+{9F6g}}n^{+vAMn0mV*nvLomkXj9P)-F(^dftb)4szyAKz<+h&-!6|HoRb zaqe^TxM@BU=sBL>4Nm^7X334FM8w->o)5)Cv_s<&w-z@fB8u;q!+2G6etXQtgN&Hy zTN&UAq)39Ih~&qckaixLY1tI=w~@W+jGx*s%3?akzBPmF0{BpZpn_J=N|AM0xa6&; zd#i5v1otLV40;aUm28Stv$CAMT{eG3>jbv^EH?*V)J?vyB%SIlr1i|8q0E2W@y9p0 
z{qazo2l{D!E$rIj)XC=$`eJ&0-%FMFP{@TPUd_5VoHVrwdb?HyEpcKpjm*YN-7c29 zR~@1X6YG2X(um#+K$u95I?7)7@?kl{rAT$D*t>o`1D?co^d%H$IToQNwW|msBV>h9 zMmQJCovE0yorl#!zGLNiUF3w0cU1UMaHkZ|_Qin^xYsFtjSHptWYA^SLhjKoaFth%NX@SVY`K@$|LwV_&M@ zuaAE5X-7C>=5nWfd*a^*8;IX$2ZU#3#B0iM?w4d2=4uCEb#;){XGjRdo9bl@OucaC zk?ihmC&6ayT%!hM}|E} zHZ9>PhFyjTm?>|yV;x7OM*WN!+Bqp%JCz={I)zZ5`uwt0>u#=GcuPdwLwxzjp~oRm zm#(wxE#)v_XwWx1D5tdXn1;5C;+%byp6gzbASA_h2p2ORaP!M4b(n=p@19aJp|GpU z{z&fIC66mKv3PKC7zA-Q}vTKh&>(m z#m6Le6wS>*4-_F+F+q)0LgmS}-x|=u!8YG+B-?H7!>w^XdM*C~hf2qNR9q>~N=X8V z7zTJ&Hw)+1q!6xhD)3*?QmtvTt#B02>m8yjI#UnQq9V z=`aw}K|c1w)CCLu2wTQb!TU706x=7S2>)>B4zjad&ok}0roXT$tP;o9@sSz#Kp*vQ zQH~|4m1?)K{Lve(#AjP;#FT$kY!a||z}>oye3wb9rUWPfh&jWOLYD0_)ksxh*Lmmf znGn7gqv~ShsQMR2>0wpn=GZzMv;>#SCK0|fwG$(Uth?A^uXADtW|~AwStXH*TjRG? zm%6AxF8X#!0s@IR=I zzMt$5dxgT~K>u}f0)hG$YI)!yTaDW>K6tpej^L-eg}HVE94zAIl$Dr_D%yz z*P@j`^jb%^rF*Cliz955ww6Gu0koCIb-KUL6TNr$zV8Re{Iyx_vEKO)myS)-^+Naa z9_tdm9~Ft9FY=9=hD|Jpn7&?qGY5L)G1YAxbldgT>*Oh4c!Xov25H}Ol4W~TzWA6|4Vp)8rRfYg!7E}9;zm&km&EoI~3>+2DtVD7&D zhFy2GE@^)i#;Y)yGnx0S53L47@`9h<4UB0kvGc9@I*4Wj(Wz5}iyY||QH1o81^yxl zUiRL%t?9M>OpF)eQo9uuu|k5Y(s|ZU(ByxIUkLV$rV7isHk}8;wOS3LDj(QyYxSnV zn@`{#-5;2s_ypXChtmbxG-9O%(SmhB(Wr_;~QH%;3XS8ftE zZN`$Ba0xPdl%_#+0TAgq3T<8COIwKT=G2qoZ)4IFK7}Na`m@EU*_Rh3 zSCxrWu{#v|ld1{cslGy%;|fT)Ddko`ILWpj&v1109*EenhLdbMJH~s!b=q2tCQY82 zzCLkaK|D2WJB6b8o!J7zL@5^mO`zH8>UD-maE-cAb*fv@vXx{>#&RHi5lKKip zuk*ZLf~K+>jZ{Bz2W1WRbRl^~Y#=c|lsRjxDJnUd#i=xYPqg|Kx`CH;fNARW@pLr} zgvCAB<~fRL@~*f=pr?7M`LOHw3;iWazp=gKg!%u)+FwRR{c!95_`uN8($YO3J)?j$ z(lK-kN;7nKE9ii9i*%=SNjFG0(kUPfA~^8*z3=;+`y9_&|0mzI{?B*-ti>?1-+N#C zbzPeSjnY;RjJ!I<%eCtKI?NhRdftsEgc?Sb4}6tm`nWW~N@6;evwBNUkg~zM6=t*S zf1zeskhjY*gNn)E-^(Ngd^quivfQ-kmb`s_@rifyihnAvDl27Z4bT=WNU{fN^z-kz zdvh`x{xB#-HE}lu&%UKceB-oyjqO*df3>^HNzPtWS(%^^(ZybkrluL%FOk+KhWGZm zl=fFiKO=on0XgvtHANieG{V9n?RSjmoP~L5#4FrTIsM8v6HezH2A)+R^Xg^&26$37 z``E}>Nf=|WY5mwb3?vdNHrMv_1Da`rekQUF5Fi;ixh$(HbY6X)Fh2WvBoS__V`1)V 
zvr^44-C&R1eNrr?*o$rvKO3v=esAyBkq~`fC_K@sFfkX&rle3O@K-x;GIoRVLv&uB zQ$BpG`t+B4G-mn_Xt1dKACMVI@Q>xAe?Y0fo%K8>e>03;a%*KG?QWF{v6MPKjF>&6+*+Y$b zoQxSQrmGcEo)wE#3g2nH#0doR-Cs%JfeILHGCr^l74LgvS7Jo_kUU&)2g>2K$)kse zzY{Uer14>ah9J&3fw2*(g((nrr;YZmMNIpV-^>7{Km#i_!enrZCp!_%TCA#~i-!}4!pb1EGvC^FQ2=nf2co#f@w z%DooHrMK$%s&X5&p1;#vnOC;2s(skg7|n8Bt4I3Bvowm$`R4@W&!CrnKeUG`c3qSE z-#f4c%Gvv;FXdw?DL*!B{X%M7(fp^~UhiQkPlP-w@aa3TFIY44E!!vUYHQ`Pl%}5P zn*y{sQvBG;v9iDrQ%@Wn!fpD(pjZmuLZYYI(BP?dsSZeQye#GY#L{=|xd+PZ->HbN z!J|N&$&}qIy8po3z=E<@j${!5yd~k`|A6G!JSg@bxKv_qf_vI&77p!);$QzUhSTQ{ZxL&?!CNZ-Q+pZy(~p-wm^a+(<7LT`IxombWrxo#t;%h-CGz7c_| z5RfWuFBjWKq+@PnnVb`d{9nUSeI;$$Xk?J@cCZYdY!4$ZU{Ie{dpP&ACTpaB9c7*jkHlR5J3c1T4FOj~-wd zV9vpyxqKlD_4nCk=Vke%{EXUQSvj{n)oNMO(wtY$XX7*5&8Yr~{f7PdYux=>}AQzXEttLD+O1!*R)-P`zcfR?4yC4$nRF(Jg zl4s9M&w1Zurc}>UT+SET!SX{uvNxFTTbU0mw{U+0v$AaDd!QMS?~1^} z&d`8}^1c@7msXIenFr{hKaCozzATe&;h;Tp`U(>7!wd!Ja#vVFFpXdIe`ttIpZ z;)i$qRznrXdU7~uN6Vp93gPM-gmx9~)6qNTp|R`zMs8Y|Lo{%n^5-NE{=JUaA2YhIuV)8w=LiB6aMxAu%&0dRC9>a-P^y{-c@?H z-^ZFmh$-y*i3WwW#!$2Ry8qjS@0@mS-*z4W8oX6PJJ2{V5d8yk4pJ>+06(<8Ee}@( zqh~YiBco}q2SgLSh@dvm0qVUF)=N-Px*)%3M%7q&^kn4T7~|7u#x6;1HS29~=#geB zxU^(hOA*`-dbXt%BzLNDA4fK_BP=pzh=i>s2x%u25My!D4Ka&o9<5a{4VpHn#ZVoW zk~+jyb{|U#&Jc{Asu3+_AsEM&N~^psXEm7JInnbky=mU^vlr=76*1Yh6L7n#vWI3E z(GD;WFY$vu$o2%*{x?h1x%agL!lq=Ag^qF7953i2k$+Cj@X?PxwN$Rmpm0_y#Etq% z*_VxQ6X74vM`4Wv~4L`b(vD1HY+* zyLGB_=MnJqG||7+9nct&28AndB|Yt)`^I2ht-&}CLiWA@-0&^dtXx!!-}Qd zf3ImHP{`=EKrcr6bM1i32>g;W&L8WYY!cqXlCT_3N-9a}CGhWi;9;#es>lTzT1L73O z7gu~98xg-yC^c9SrZ&h}iW9k7ZQedYCd)Vmu&7M{vM(RDqZ%R{S@pyMR-3dwBgxj>?!og*pcxMFn$}ZaUwB{!4zTJd(V?YzG!J zJgBnBNcNcZUI?^;Lincc@gh1R`&>fS?8F(Ra=~;CyiHPVGXfkhR|79z$csJ%O497P z5lM$GYOTCd3Us-t+YjjUgr9MoA)_0y$-aw;Ok|n#)ZDsil4EFR(zPEJlx*x8OtU(y z2AMXXy;*NEsx7N!_Z`zs;2*@!$+4jBEv2*PRk7F;0MX8u=nNJYK6~l-6)i*Rbw?In z0-<ryT@RQW1JRgI*cAWmV84pVNfiO7kz z21*jbJh^Q$v5jt-7K{Z*{h0+qxSnx0IjpU_N$LMEygbf>c> zt$OALiG3nMWkLgL)KM!|7z{oy(W3iIpgt)uvyJEC{s5JI6eg5CkVfN&07b5~uG04L 
zRZ=*1Xu4HADdv)wq+NiSf6``st(V+BFi%*_{tO?xiqM|3yoGs}SC<-K6NI!T_g3A< zl^#4ll=knw5&=h0SB_dOk;vY}BQyQ>O6&!~koR5vK0ZNN*}2yS6YuUGiY4r2)e`Mk z+HHV5o5pcMJDSg+IJDv7h^vYx<&nJnF5Jo^+N>JkQ#Ed||l3Sl+^M`t<0L@n^$>;uz6 zQ=!d2@s4HPd^eWbzir5W_QnzTQ`@(*w@LoyNKNBg<5&&u`afOv=NnY;^ZTISpSAji ze?12H3>v>oZoF2#NW2;vd=S6Wufz1ak~}^gen%8~7nG;yqQ*}uXX*d|o}267B~{O(m}7TRF|SJCpU&+Y#=HJ)SSrD8oI~5+qDoZOOru|6%v)S9MFl zz|^s(z|+8YAnJ+v_~w1unVxO3u41jFot>g*aMlc#b`!eYvjHYopAj|*R? zO}11|V?-VYnyHhw&6AeVS|=_=J5!!;)=|lrcHk{Yx%}yPwKbVe=WaW;r?t!7R2-q6 zbZa=w+1&OB(8OQ912)P3e|rk7{iT+hm;Aky6-IH6H=VN`_L4v8OMvKUNdJ^mlG9B8 zSny-Y&bz6$4`jIb986w^l!DlT$G-ZT^ECW1J}w^Vi2Qj$i;w#_8KuZ`i?>75#qDIW z;JE$K_>auZA1q8b*J^&&p~Wyhp)Td*8LmX+T1(b*diybey8aoMf;;x6^zziXw|Min z!T9n~H^y{en!tW+PyM%GWxHr^@=rxEg#u@sinD0bS?h%`1478);vB2mINt!V>P$IT zHXogw+XejTdlv35d`G5uo zciPC^mpk)%*%o{ly}R<_aM)zmP>o=OHa_HuHn)m+H`WBxYAZ8vU~#WXA`F~$Hk!~hvmUjPt5hqnqZJQ1oCZ+ zgw0KkmGt(oZv$<)Vz^e9CAQE;@T=U{T2KzIp>fd3oYtPNb}}`%v>fX~z==GD)&(*B zy&xqVM7sKeI;rly(CZl*ImY~bK#|C=a(xQ+#m7w|CtP8-CI5gRtn@88XoabxOZ7Y6 zWiw(wq?Z6-x15R~hERe1^HZjd09Pbv;5iA|YJGwabG!^i&((E#=hDcn2Q7Vlw)sWs zp2-~zhQ4iB^+YuNHD!{)nQ$dTtq@1qbYVw#%4-N3UT?>${EOKvqrMiW*@LQ$#whn^ zfdV=&FHDs#c9^bUNW#GVCXEh^*wkVc6DR+{@~`Sx(u zzRehe=j`dF(Y>)2Dt5^&>HfqtiQ0}|_7@l8HivZwXYX)kJ^AAkFHm$bR;|+zrckoV z`8d3NakOvl3m^@+kY3%IpDv z%zntU5&RDZqfaL(!P2}s0RDM%+wzcnI;CGJ_e%?v4wb+@xQk*OhP|4B*}dbhvY$pT zVN+ABb(dAp{K*{hG-5?aWq_;sDdv;qxX0%CuSH`Zj~ZZ6w38I*(%aCGfZ0$g?GTm* zFeS!Jj|L&6yM#Dbs=x{IeEM-eKS?Iuhwm<1nbJ_4ALz~RYle&8cTx2l;R&cgCdhE( zIkyUoa3qolUzmr`cbJdTPD%`KIZibSg`Xb3T8k{JDlh(^jFGIQq%E@Cd*_X^52vMR z&p(t^oc%UU%EN(X&GMVd!@1bvx1Isgbe_VvvUBLeV7hNsxEzdDG(rn6K2CZA>FG1b z3B+@v{SAtm0|MlfJHUUFHyJOi`Z4eS%Rk>0@_)7By8k0)aB&6T|L6&(r!T+c`MXwX z{?#uZy8hPu?7XEi_l_zLna^L=nv4>)PInJ<`EDL2#6oM6Q?A<`Ak`tXy7;4{cU1B~ zpOIcs>lx%1&jbpqu3SQ~(R0IP-JqJr*>OE@{qDpqWk9dd?nLRizN&Rv_GuGqAhz^R zNVh9oQXmKKaz1il`0~YjOiXupKUEQPQrU=G18z$^wuo@~cn-1{YFLE9<5uf3s)7B1 z0Ybb2H*$^2R(>73m-AGu{Zj(sxY2<4MYJwqvR{8_fOR6NxKP&u`gV7#%d$>4!_p$d 
zsk}0&crDl1f20LNNy2@Ld{e{P*el~;mTik<@bY#z|0RchL7$UnvzSy*OKEErix~2 z&B+S{oW6>5QKYVSbB<3FixgtDQHN&d5(>8E(#1aZnjUvKtC)4b(a>hwBD zKGq2QG~q^q3>^a@INxXUJbAj7q#lJAG8j5vRt8H*C-2+at%B#XK+*bhC{aoa)t?919Y zSCz}zoC~{fRj+?l8#;Sa!W>x6oHKk~uJ`Cqm*Rlmyvy$blLx9#{0U9NS|Vze%^IWRRG4@>(zZy`gMUo18ZdA_53zy^{duAv*}ri{PtX&dlNLYYo*k9Iy?c0n=}v@{$SxQ9Th|R- zKt&4%e+RTl{Wt&Hac&o5qGa6{<)567249!2=Y9HjnXJ5vm)!`;E`=N^MNehf*-xVt z+C4*uT>aF*kkF+!C@tyA>t$LxR@&W|-O+M9RmABtPZ}KHQq`J4XW)$+)SkA08sLbP zY0|-wsu~!k26Uxfc!AJKrnhKnj4??ULTX}@;1|foUKpOAp~Gi zIL^AmV>>N+^ZOm@leZU=;5(96uY4=4by7r*cLYrO++rj(6kP0EraYy~?Y7*)M0AlV z^zIFjS0@q;-O>5g4aVa4!5eA1vCHxw$uv!{*#FWF{;rCDTgZ?!9x_I56#sV<#T`S(DX|OH|0G$Kk(@R3pvfSm?IjYHcVaWr|KZ`9sShx z<-7UZvT`9=?)R0z&M!-;i)Hci;CWTs7Wp5ALg~Vno0hYd7dzcMFR@G>#Q0En%?}{F zd^{{rmDtNuUem$pU8-OkrFs8RxgXhw3!8h)NEb3R^(yz-$7D0`0%+G8a&QsC>p)d( z5|aa&w-lbyItn~qvy!^XMicwt4tT=QO{!wdWp}BVna$%aCLoUNztC z679p!X;0~{&X~HI-#pI#<${aq8r;@Ffi`}J>rzucn(Sy`Jdo=? z{NN$E6%a!Os5z47e8U-NvJ$RcC56sBN~N>sGxT!=qxkN~$qmloXDAzF9ezno2qNmm zFu%bdtdhK0AYj8O2QR_#a4ZfX$Z^G=HPBUCt-2K2y-JU z6qw(VjttO=Ds_%yg}l3Rs;~RtO!)@Qbt=+GmbT_NRJR2>fRmrtPpa_CUixweS?)k@ zD7ejipeon5Og=(5XFm1kPB&QS5pxL@ddgCVcB0|nR=nG9&4L69U1tTX&lQ$Ql0WDC zy|6f_CGsi}A>tKIl5HkFg@_M#SrK?RP2iMVr>?_QQwn=xB#Y3+9_NiD9tp?UMv0ctA@i?2AUyzF}HTkbo zj@=4&4zUGAmqY2j8opeTAw{YjyqA|+gvr!LaO_a2$kN*~YvC#=@pL^zM&6#-9dSDAH=Tr7n=~%LUm47;9{VZ9l&2T_}8v#a0QV3`v6jH;a?Q*>2z>s z{sG_cziV2_-e##BA^&W8wd%_-?#OA{8_W%NLMp=rC4n#EH%zC=5ux0NpR9um7QPgFZsg# zI`ir6N3IXubPRVi1wMuH8)1S(2pTovQEQ$nn?i2x5-X;p=rAG-tp$!v?rpM}180sQ zP|AwH4&={>cqlc>zMOnpui!wfZ#UETRkZQ{x&Z%LNtFSHK#?oJ#rK7g`$@m#1t|M| z_aD&YI|m^0=zn6TtpFJ2^h8p#Ijtc$n9aOuH$N$g(ewjgJHpagbuHWc-KKTu8 zm+(I-`i_C~YKU5xKI_^t6YtWQ>WHWUQEPk7PeI}EQ36|V^d@Y7%16?yP|YYazEisx z?X6G72;ZhrC;ak9^arPt2K!r>%GR1kI*B}-xIc@G$n@=9hNg;~3xi?Nupw{wJ08YD z01%J(tjZjRBX8GcH;b=JJ_SwfGdMDko_@>fv^@K!82?dLuu?7la?;87^0?FNglxP2nOoH+-pp#U7 z4gv&e$H$h?F2f%-ORh`;LxoR(`VYq*Txhx$&2d*bEIju}ugKT= zd46V0kWmyo_UCSV?b7vLXo}WcK@;DTDh1_`<+X@glkMrs3VV8&Z_nw&3S!ozQw%%7 
zQp8bqV(&=52#bgmr0lZD#Fb2xlv>A_aeA_d-OW5JPVp+iUav%FYI^!6u1R~XxK2A< zwTXc>#G`!PR_1ZNbzNjXi&6k*jXmL6mqol@v6*0xz+rtO-t+9jr)IEvmP77Oluov& z@vo1(fVwp9IY5PUbBDLSs*{}010qTi0wF9e1Tcw%;?`XRjMhAaYAb)7OP_Yu3I~d# zVb`@WBcP5R!n3?WwPJa0G_pBMV!%C%H{Y;=?8KBnel7j9Ay499r`IFpnKy2x0_{G;`%^>tmX2G zD*Tx#yY0|Pvl!7R`Htq0MkzLievZ!*O$GZXc}8kUpN{=phM@&Lyka)XRu%YMQ1LUt zY;lUa%HWBs=l^QG41xh~yc?zDg@EKeP_`!Dc`i@5amao zfJz_!ciY99?e_^Dhnnpk^9u=Ye*s;)(n7n+OIII5rwCP2Sq{Nd_GAO0BUPs#OA!`j z2q{So)VU8DIVqL$3&{f~OrAGjXvbi(Zz3`H_uFBSVmfd4(#L(FAUctYSi-~M{~ zmvSnUUgj4sJiRzrvj^(Gs+0yh{LWMR_(|;r|6t5V$e-iiE}frss<`U*vlArB(IW3U z=s3<*^zZl*PzGV2mlIOzMEl6PP2VM2`9SFAy>=_BB!Sj7(>g;hBatr`3SS~%`}_-J$h z+m0n$4PJm%p54PUx8Px^<@flX<-C^EhWmfR<%gl*?%y!kT5rW^kMu=JrB&fA;l?JZh`F{?`TXhIEd#vFT7( z_S1KCV;cTn*jS$l%<8odkE7>&Un`k-oxuB(&=x(ZPYltugETmK_DyVOx&5OSvq2H3 zbk~8A0M`nmJnbZN?~cUz3>}1*P7jvo^<=uh=esSqK|9R`FPIBd&1>!|Xv5JUHX6A` zoZ`Hhx2gWf&!cGh!=g15BdfxehDTx{h7au#d-*p&Jt4axyoSWv(4EmmhK5Z44t>HpJCb>R}p7Vm-ZEgQk55z7@yyi z3!}7n&G1$9$_C&`=27lxdGwJe$el@H!E!HjvCB}IFp3HMgTUQ_vIp4P%6o2@)|s09 zaTA0=mpr!DO8M3*Us<_|NHipfA}`=YJe(^6n#aqFFy^eS1h^3>w5AOy%aw&Q*p7Xw z!)iIn?0_x^Yz$sj=oOI0OHow5G_@r%I?QJPoN_JaeBxYv-qln+8K7nKQZwj>I8mB1 zPe`Pg#$C8_niW}Bm9+J6LA*;|Qq!*Lu){PcHZdd3U2{OfD4tLaguP4FJzOkFpoSeU zHCRXPjF_3`TO0Ys{eEKNi4%C?`+1p`+<>dv`(r20IYg6m{?t#u{`)@fL_2Q*+Jd+I zz=5LaU6jIq=DgBn!u);M=T8;_N-aU}BCp3-f`@v8uYdy+86CZtF#W~Zho9L84>p-O z2bZ!Gbl*q6>cJL`z71i6_o9kR!C+KC{_F`VI#B^Um|HON{kM;-=A63G%qVL zkx#k*AOxY_d0p>Xung-bdx(^kD|gG0=|j@~U$ICXd(kjEBc5YaNOIGMg02B|Y;Xqj zf4Zch0O^kBlzO7f{1+2Q!DL0f|wE{BF^c)I)y)G7S-oiutcT#*2s4hU)Bg@6X2Q_6kT(dSaQ8G^R` zR5_qoQX80vaEXa`5ytDsaI_kAFnU&A2sc7llx@btGP*T>mVbxj1gGkg9@|%0fkRas zB}wJoYayU(`84$pNY(gOU%#A*=GV!zRQ^GTx7RAdTLj_c z3SPS~b8stZOS!P38U9Xqh7Ojc)y`HXBjf9v(p+@05KOR;X%CD|>TXujLKg#UL`kiB z!(K)ky3QWLrQ+7OXhJD|KychXWE&6$+;~PwnsxJLB zPT;0B>uC>`_juj${NOJCxiL_qC6Yg`HD1p$O|_ZV;Ou?1IDABsosikftrmMSNd!g1 z#rB4-CJR-coHeJdzqyP~Gq< zo90yAs(OK`d8AV$W*x%T)SfW1HGfQq@8*7W^y;Zw!Sd>C)Xgg}QLcrFeXQWL<%GFH 
zgNKn=grTFs^#xV%57^a`>d%ti*6e-vjM}dSsQ51cqn?nt-Ttg2WSj5`c!nxs#j7zF z^fS?FU`Smm6W~KaTYeP?YS~VZt?Y$J=U@}oAKDfZX{$=BET@ur=U`nI^ozzMfmWaf`i+qJ@!$Q_l^R(1(watz$EWHt2x>WnM(|Fh4=^o_rdrx`KAFG5NF(@-RrsCA*TS!4>YR}#;tBj$fZragFM z=)?1su(>?L{zE;1pbj?0NW3x;20uN36AzXn_}0bLnLeJmrrPlVGvi||h9BDJfEW)2 zD!;V+DbQk=>8cvSvrlNg&FBDnH`9&ecvEoSErV$13V`7446AQ&`P)%E?>< z4@1!-#vjWZI%a#|DXx&=3lg=9Y!cBl1!x;$O$tqiFlN01@Y1plcm9>G@I{^J;iedo z0$NXSD1rl1$ExHd|0=5YfcNkqo*aEcp(khHreHxm8Hf ziy2PL`Eg5kXXh7s(Pk>^h6Ya6B!$^m*ZE{74#sF?W3CL^fZz0{K!R%qS~3oRh=4E4 zE=X8j?~@061X-Z_z|73k#Y;oU!YvssK*+rCJ;ubCIluERHib7jEOE~0wg2`L_N`6p zxDVLt>%6mwluLS?{&bQAcrCBzt+Q0U>V^ueG=R0I%2t$a{mhl^%u^<`=k(28C4Jm8 z&NUq*4`+MVK26IfgNzH%6`G@Jc}LbeeGZN_*k-nlgit@976QholNX8y0%v>>hE}X> zC5u4OtUxT4+*NV{WXSk&iqNFpSPPq)LQSo0za()5; z?@HcXYb;=_?mU>qU;EfzOvZj*e^Bj7B%3$S7n{Z&DsXr~HWRPXhJ2HlX&%C=-7k?R zL!_Lf0k&j-lv#aQMI3olv}8LJXoO@csb7kev1=bGI-$qZcQsGPAcWagUGy7lKGn1( zaQQ2RHe-tcr$LvETn`k#BE1oD)C_Bgg|k;leldxz_8gB`OhCb{0lneBX@=k(_Q9K@ zrAqnO`!R;S)h;SKL2qg1W6z2BL}H&2uCK-JvPfzes;9fvhSbT>h$!R}U1p5UDAFKo z{?#TNq3q3hj(29wO<2n;n!znBcJq9&L=f6#g%2^URQ4$ zuqHd~_${cF$QLapBKJ6GQD0}!R~%~ju<^OZz6cGm2oD|F^9t%*@732Q3$79yg9z0a zpOdZ$FYIb&8`oa#TVjE?5#{;%0X z4M;^_5O3NECkt8J<`O{k>qg{U*xTMbzmbFSXmxAfj)b_B5N(G8l)0>h8MW zyp!S!eb>%|Ilzo}r{;^gOf@}iYq-s9I0+hN7w3+5{%T**>DZ`6drdydb?SF}c`(4a zS4;iD7?{mEzFKA!U$nS6Pm)Fn$=)j%AWB} zX)jp~ops27&0@xhyP>Jp_iDK4or;Q0t(Atv88yBnb1>IVrDM?ixs>QAb2Xs)qJ*fT zG&-X_w8(-m7Fynw91Of&@C}Y1{gBG{Ji#nIjLzz#r3~JeA?Adljz$Wa)#JPn7IThh z@RV`&gBXGAT1VNzQf2yxuohCvPLAo39Ccpk2Srz;7bKb!ojQ`bXh3l3Sa~e7T-!nf zAb0H<_r}2F79ITrRK%B(bcCNBS)Xh; z@(nhOkNZ24N<*N=2KHLl^GJ^(6wjXh8c(UMrczs{df?506xED+4eu>2*Pjfl_P4E3 z+#{Aqd{l{qmO|;N-c`vP->Rx2IbRl6+BXqi(ct*gF(;#?`pWjY;V}=*{IOFNi?MG) z*wU>!^(m==^&Q#!H{Ou%wxWJT1O;wD@~socIm}pZCU=ItK5Gs`!Xe8zFB)>IgiZfd z-;g{cUHeuL*rS-skC1DCoQK_Y+8I)tQmUjK%L~rvP#|nh}Kzl6;l$-Bq=kvuMo`2z)^vuD# z`PfMD!>3AmfB86er0qpolKkodR(%>WDKyi#HfuTvny=4|}q`ld)?Io0jVWdP^r=3yG1uzz3@EtF6`_v&@)u 
zpnjLNXK503Ryf!KMO#6|K%^y{^jKLTS6#HUaXCgN_sa}o3SqLkqJ;^-7I7X5Kmn5YJ^6YK6T(a#k<4w` ze`oRZ`Mgm+oWDJtIOJ|VJN=(51=BHx4UOy6y>T(q%PX^7hN$BZn5Ya0AOfj#0I9!so)>>CZl}hSBrY9m_Z+_k4{A(ZCzHJMc{D=aMLNKu%MrZ zA3UA!-mkQr7q_W&tY~G(z;mkPoMMiE7&!eZmge||$#ezPFXRERt|CQE6{M^buEMq) zbP`>l3x*po#o|X}?Jm_nCYk}+&_F|KDj11sfJ-B>p%nc>Ube&}V+t%|WwU=mYT;{Udw;P>V!yVq8s>{- zy5IgfGYNZP^~~ZQ5beY^oZP$5%X)c6X?{k+SSF=OqU(MDh0F4QsUwZ`^;gBRLI?57uy_`X=m?>Tu+J10n->+|VdghaAy z+2_SoYVDbJP{eOz)r6B}{}$|%EQ|U;3GF-N(MBjU2y;V6f~ejW#b-N>N$+)=WFxF^ z-h0K|gn?&F1=7z|P`15w7ztPe&JR?#$v%xU07(LW7*CsQ$pOCv`j?cT%!C|H233)& zZk~7pVa;Z5*t_ktbl}Fu>e0gokGEkjB!_&DZ$eRAt~4`NR%%Zz`R0%IW>n7kV~vc; z@!JZyjD?T&n)_WC)D@VU}+m5RI1p#(s&{tl-z5}sE{Yq zvSAt2u;|mXJ$m->l9OO>fcWu`KKzGdPe08Y1ti>plV*w#Ri&(pM^?o96^|nQ~MA zjM}si<|uSDkowh_%N78PB)1waTv3u^VXk_tt-5ULN@b)b#lq3iev@G8E)J{$r0fH8 zdHR<5Bg@CrS<&Q8`1Gr`piVc&*X@c`SUcqD9Sy6mtc`6zZfGJ^gqux@LG|6ZkK_C@ zexRW12fRwcgUb8c8dwA#rgR~u?Ep!iI z;bX0cLE1U}uhB2@E?+)>x9FrUZj0_Il74<*BA+YtQLej7V{Wx;WPxB1F+A@+G2XkjRa9tPSYj%fSpt>BYi~s6Gf&59?qDo zH^8%s=HGiXMEJW1_t*BgzH^iRm;ACN&!x5+514k zV`kz&k0g3$_4Qqrw`aATUd#|*<-C+2T(D6*>|+ZG5uF9A(J14wX{Iw$eQN3d+6Hz6 z@Ony{_oj1t(zN6R63!TqLi&re< z+#O4dAC*G_hTJ3QChurO(A%7e0+=EdvkVJociay3&3UmK1MRW*eK&YO~T1YMM` zoMla|S%|m@%yfPQ@A;8&57c^1RUgUX&)D^8`(^5}=ZBWExY6W4g2zyJcO6Mlbde6| zT$iB+Tz??En=;Varqcy3$waTj6>a%g+oWa`@nrAePhB|a$zibZi^sp4Lv#39EysxI{{abCxyO`!nNCS)eCh(#!jqNiXU~m0vs+@k)Eo;d z&YG>lGWmF!t(>utj#lQ@Iga?19j0_>%Q;QeFRU$9Y(X|m30*dll?sQ>AE=iUIhOg1 zf%F5wieEFTS#y4geimW$?8bbbelUw59vG~lh@Poc?k+_E z=7M+$>r%6~@!=UZG)A)}E04PES!0)ApTs&R;q5{{@f|xVF+vQ|o;z+f;E}f#`#r^xhX`=J+rE*N z=*BETl6HPB5_!z?l(ET&gNFu_)dHL0qk)8At>ncqkP8q4>`u+^lM?pCY3%7DOF5Km z*H#&>*WJV0qO_(%MYBG$YJ$UENiLc15=6i&p{kcto1_DWL-MkaA=p||)K4m6M;pdp zJ-}~q^*hc=bhB!imYa({Q4xEM^U7C!VU(b!M=1;e{z}OPyHoUP!XC|ez zC)y<1w~fKdMa!$ic?CZUZ>z^>-bIMc8EzzMv^32yvZ{D|S0UPv9(}%h7jM{!n580Z zJOdwENuty|=xN%&%vk$J#q*4z!tLTX@jl$oQE#u@JyB}2ex+GhbVzC+X?!Wnd&%i%;G#*(9pgITIiR=mYhs7u$ zZOyeul(cB^M+SGOQ~G!}dJ?Eykk6te|Eb>Khx+d6Y1iA}k8-ZncF#4TZp-Sr5ja%) 
z*9C{t*EHSc+O1JyKSeXQCvHlQ?uSDxaD<+dXy=TU#8Sm#1!zt!Pf<0C%sB%hj8!5mK0qp zzB>!%2<@4gi@CFg+_#9lIzZ9khY1s`e#5=wiX4T|XOGugUi|u)r!EZatmOh7J$c$k z6w&JvWN96ZW@Z{H35`oDmt}Oao_H%y561uG@}{26^8vZp zc^Oco_iCJ=0$Gc}8hx>cOrrU7nSA^Ls~GNa7=etvf9ac=nY3nev0EUjc-S>|0``N_ z_M!zz8!1j?qKELIx!ZH+`)*>B>1$pp7XzhP*N%?Vo$HN#wnXcVwJIeUH|CW^-u#^R z&+7|1d1XZuLos#C?lL81%E<(d|9MF5D<>M7ZMz zcQs6Y19@rFk~Old3#UmRV^_n)9$(*7#h4MbkNr%f9_iU<7ITxNDXj4ET3cgHSzc!Z zceY4ekMDD;{i*z{(X-0&t$Gj0;3cnLMF{O1KzX=vZeX*YRTwIzU8&+1b0??{bab~P z&_#t9KEYsLf3P^v?q?%d)(b}J7Un6UH=4V$d z_$Jhr2KNY_@yI}|4BiTFS2JvyI)CX4<^W*9>c(7Dzn4sX=_AEOUG}O`VGS9X3V%!d zV6K=|zS9#@3^)(&&H&EC*C4W|5=^I0fSd)wcFcMp=IC3cnQ_xpzHO54!lxh|22Y}L zh69ta;t$T%(Ro{5)+|rlLw}?7`{SI~ezF77$WPvsIsuxywSQSjU+<2Y-8s_w{g zRrJ-GRttZ7376|6tBFj%O0+@SqU7J&^ zdi3)uOmKG)JyF{q`q`#Z=~og{onO9@@%)PKv#T+&ukE%gDpwKeFo=ndHM(ISvi*>^ zgux0u!A@H%Y8iVrPC&&!Q<>Me+UxuO1?2-8{AdPiL;FViw8!+Ot2Oo+E|PoA9bh9A zxvoq5O8kGzX8kD+r>K9=X8kI?yZ%NmV!xdoc5BZ5Bdk$g**uv7;25`K__Ol=09w^L zrlLRPtA3Tuc+2e?u9mBGJj9KI4^Vx0tm@5GgM^^$%cmDkN>Q4*$+QIqT_pOX z_YgcYX-y<_#?9km>=ar0n$Pdtr@cQ&H}u5j=jME1NkjYhxvyxdJ|+0^x@L0 z8yoa<KYvV#nP*M*c|X_E$#%b!32G4a$gI>Ki;#tABjKAQy0RE7y>x26Y%T*0FKo2MfLo@ z%oL(FBB4Phl+H46Mk!|S&C{kMd$51nQb*_oHGDa#BxM6WJxnc{wafg-caL)7IAjMR zpps+-hDQ{UXd0RThFNxZ+Y!x87l$z*Y2&?=cU0e8=hOT|J$(t&cG1KR@^1b2sKiXs)L^ecdUC(?8){^(xIPQeQW3$j(!L zWfrt;IV>$w7?)}f%%4;5TGfaBvv>ahK-oWE@TLCRy#D~8Y@e_AR|BC{yS@EUgM^-; pZT|oxMbG=6&{o}GX@+UFxo%{E;tq$XIPKoI>0&gJUPvdo|JfdM$k+e? literal 0 HcmV?d00001 diff --git a/src/simpleimageclassifier.egg-info/PKG-INFO b/src/simpleimageclassifier.egg-info/PKG-INFO new file mode 100644 index 0000000..296142f --- /dev/null +++ b/src/simpleimageclassifier.egg-info/PKG-INFO @@ -0,0 +1,414 @@ +Metadata-Version: 2.1 +Name: simpleimageclassifier +Version: 1.0.0 +Summary: A simple application to do image classification. 
+Home-page: https://code-repo.d4science.org/gCubeSystem/simpleimageclassifier +Author: Giancarlo Panichi +Author-email: giancarlo.panichi@isti.cnr.it +License: # European Union Public Licence V. 1.1 + + + EUPL © the European Community 2007 + + + This European Union Public Licence (the “EUPL”) applies to the Work or Software + (as defined below) which is provided under the terms of this Licence. Any use of + the Work, other than as authorised under this Licence is prohibited (to the + extent such use is covered by a right of the copyright holder of the Work). + + The Original Work is provided under the terms of this Licence when the Licensor + (as defined below) has placed the following notice immediately following the + copyright notice for the Original Work: + + Licensed under the EUPL V.1.1 + + or has expressed by any other mean his willingness to license under the EUPL. + + + + ## 1. Definitions + + In this Licence, the following terms have the following meaning: + + - The Licence: this Licence. + + - The Original Work or the Software: the software distributed and/or + communicated by the Licensor under this Licence, available as Source Code and + also as Executable Code as the case may be. + + - Derivative Works: the works or software that could be created by the Licensee, + based upon the Original Work or modifications thereof. This Licence does not + define the extent of modification or dependence on the Original Work required + in order to classify a work as a Derivative Work; this extent is determined by + copyright law applicable in the country mentioned in Article 15. + + - The Work: the Original Work and/or its Derivative Works. + + - The Source Code: the human-readable form of the Work which is the most + convenient for people to study and modify. + + - The Executable Code: any code which has generally been compiled and which is + meant to be interpreted by a computer as a program. 
+ + - The Licensor: the natural or legal person that distributes and/or communicates + the Work under the Licence. + + - Contributor(s): any natural or legal person who modifies the Work under the + Licence, or otherwise contributes to the creation of a Derivative Work. + + - The Licensee or “You”: any natural or legal person who makes any usage of the + Software under the terms of the Licence. + + - Distribution and/or Communication: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, on-line or off-line, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + + + + ## 2. Scope of the rights granted by the Licence + + The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, + sub-licensable licence to do the following, for the duration of copyright vested + in the Original Work: + + - use the Work in any circumstance and for all usage, reproduce the Work, modify + - the Original Work, and make Derivative Works based upon the Work, communicate + - to the public, including the right to make available or display the Work or + - copies thereof to the public and perform publicly, as the case may be, the + - Work, distribute the Work or copies thereof, lend and rent the Work or copies + - thereof, sub-license rights in the Work or copies thereof. + + Those rights can be exercised on any media, supports and formats, whether now + known or later invented, as far as the applicable law permits so. + + In the countries where moral rights apply, the Licensor waives his right to + exercise his moral right to the extent allowed by law in order to make effective + the licence of the economic rights here above listed. 
+ + The Licensor grants to the Licensee royalty-free, non exclusive usage rights to + any patents held by the Licensor, to the extent necessary to make use of the + rights granted on the Work under this Licence. + + + + ## 3. Communication of the Source Code + + The Licensor may provide the Work either in its Source Code form, or as + Executable Code. If the Work is provided as Executable Code, the Licensor + provides in addition a machine-readable copy of the Source Code of the Work + along with each copy of the Work that the Licensor distributes or indicates, in + a notice following the copyright notice attached to the Work, a repository where + the Source Code is easily and freely accessible for as long as the Licensor + continues to distribute and/or communicate the Work. + + + + ## 4. Limitations on copyright + + Nothing in this Licence is intended to deprive the Licensee of the benefits from + any exception or limitation to the exclusive rights of the rights owners in the + Original Work or Software, of the exhaustion of those rights or of other + applicable limitations thereto. + + + + ## 5. Obligations of the Licensee + + The grant of the rights mentioned above is subject to some restrictions and + obligations imposed on the Licensee. Those obligations are the following: + + Attribution right: the Licensee shall keep intact all copyright, patent or + trademarks notices and all notices that refer to the Licence and to the + disclaimer of warranties. The Licensee must include a copy of such notices and a + copy of the Licence with every copy of the Work he/she distributes and/or + communicates. The Licensee must cause any Derivative Work to carry prominent + notices stating that the Work has been modified and the date of modification. 
+ + Copyleft clause: If the Licensee distributes and/or communicates copies of the + Original Works or Derivative Works based upon the Original Work, this + Distribution and/or Communication will be done under the terms of this Licence + or of a later version of this Licence unless the Original Work is expressly + distributed only under this version of the Licence. The Licensee (becoming + Licensor) cannot offer or impose any additional terms or conditions on the Work + or Derivative Work that alter or restrict the terms of the Licence. + + Compatibility clause: If the Licensee Distributes and/or Communicates Derivative + Works or copies thereof based upon both the Original Work and another work + licensed under a Compatible Licence, this Distribution and/or Communication can + be done under the terms of this Compatible Licence. For the sake of this clause, + “Compatible Licence” refers to the licences listed in the appendix attached to + this Licence. Should the Licensee’s obligations under the Compatible Licence + conflict with his/her obligations under this Licence, the obligations of the + Compatible Licence shall prevail. + + Provision of Source Code: When distributing and/or communicating copies of the + Work, the Licensee will provide a machine-readable copy of the Source Code or + indicate a repository where this Source will be easily and freely available for + as long as the Licensee continues to distribute and/or communicate the Work. + + Legal Protection: This Licence does not grant permission to use the trade names, + trademarks, service marks, or names of the Licensor, except as required for + reasonable and customary use in describing the origin of the Work and + reproducing the content of the copyright notice. + + + + ## 6. Chain of Authorship + + The original Licensor warrants that the copyright in the Original Work granted + hereunder is owned by him/her or licensed to him/her and that he/she has the + power and authority to grant the Licence. 
+ + Each Contributor warrants that the copyright in the modifications he/she brings + to the Work are owned by him/her or licensed to him/her and that he/she has the + power and authority to grant the Licence. + + Each time You accept the Licence, the original Licensor and subsequent + Contributors grant You a licence to their contributions to the Work, under the + terms of this Licence. + + + + ## 7. Disclaimer of Warranty + + The Work is a work in progress, which is continuously improved by numerous + contributors. It is not a finished work and may therefore contain defects or + “bugs” inherent to this type of software development. + + For the above reason, the Work is provided under the Licence on an “as is” basis + and without warranties of any kind concerning the Work, including without + limitation merchantability, fitness for a particular purpose, absence of defects + or errors, accuracy, non-infringement of intellectual property rights other than + copyright as stated in Article 6 of this Licence. + + This disclaimer of warranty is an essential part of the Licence and a condition + for the grant of any rights to the Work. + + + + ## 8. Disclaimer of Liability + + Except in the cases of wilful misconduct or damages directly caused to natural + persons, the Licensor will in no event be liable for any direct or indirect, + material or moral, damages of any kind, arising out of the Licence or of the use + of the Work, including without limitation, damages for loss of goodwill, work + stoppage, computer failure or malfunction, loss of data or any commercial + damage, even if the Licensor has been advised of the possibility of such + damage. However, the Licensor will be liable under statutory product liability + laws as far such laws apply to the Work. + + + + ## 9. 
Additional agreements + + While distributing the Original Work or Derivative Works, You may choose to + conclude an additional agreement to offer, and charge a fee for, acceptance of + support, warranty, indemnity, or other liability obligations and/or services + consistent with this Licence. However, in accepting such obligations, You may + act only on your own behalf and on your sole responsibility, not on behalf of + the original Licensor or any other Contributor, and only if You agree to + indemnify, defend, and hold each Contributor harmless for any liability incurred + by, or claims asserted against such Contributor by the fact You have accepted + any such warranty or additional liability. + + + + ## 10. Acceptance of the Licence + + The provisions of this Licence can be accepted by clicking on an icon “I agree” + placed under the bottom of a window displaying the text of this Licence or by + affirming consent in any other similar way, in accordance with the rules of + applicable law. Clicking on that icon indicates your clear and irrevocable + acceptance of this Licence and all of its terms and conditions. + + Similarly, you irrevocably accept this Licence and all of its terms and + conditions by exercising any rights granted to You by Article 2 of this Licence, + such as the use of the Work, the creation by You of a Derivative Work or the + Distribution and/or Communication by You of the Work or copies thereof. + + + + ## 11. Information to the public + + In case of any Distribution and/or Communication of the Work by means of + electronic communication by You (for example, by offering to download the Work + from a remote location) the distribution channel or media (for example, a + website) must at least provide to the public the information requested by the + applicable law regarding the Licensor, the Licence and the way it may be + accessible, concluded, stored and reproduced by the Licensee. + + + + ## 12. 
Termination of the Licence + + The Licence and the rights granted hereunder will terminate automatically upon + any breach by the Licensee of the terms of the Licence. + + Such a termination will not terminate the licences of any person who has + received the Work from the Licensee under the Licence, provided such persons + remain in full compliance with the Licence. + + + + ## 13. Miscellaneous + + Without prejudice of Article 9 above, the Licence represents the complete + agreement between the Parties as to the Work licensed hereunder. + + If any provision of the Licence is invalid or unenforceable under applicable + law, this will not affect the validity or enforceability of the Licence as a + whole. Such provision will be construed and/or reformed so as necessary to make + it valid and enforceable. + + The European Commission may publish other linguistic versions and/or new + versions of this Licence, so far this is required and reasonable, without + reducing the scope of the rights granted by the Licence. New versions of the + Licence will be published with a unique version number. + + All linguistic versions of this Licence, approved by the European Commission, + have identical value. Parties can take advantage of the linguistic version of + their choice. + + + + ## 14. Jurisdiction + + Any litigation resulting from the interpretation of this License, arising + between the European Commission, as a Licensor, and any Licensee, will be + subject to the jurisdiction of the Court of Justice of the European Communities, + as laid down in article 238 of the Treaty establishing the European Community. + + Any litigation arising between Parties, other than the European Commission, and + resulting from the interpretation of this License, will be subject to the + exclusive jurisdiction of the competent court where the Licensor resides or + conducts its primary business. + + + + ## 15. 
Applicable Law + + This Licence shall be governed by the law of the European Union country where + the Licensor resides or has his registered office. + + This licence shall be governed by the Belgian law if: + + - a litigation arises between the European Commission, as a Licensor, and any + - Licensee; the Licensor, other than the European Commission, has no residence + - or registered office inside a European Union country. + + + + ## Appendix + + + + “Compatible Licences” according to article 5 EUPL are: + + + - GNU General Public License (GNU GPL) v. 2 + + - Open Software License (OSL) v. 2.1, v. 3.0 + + - Common Public License v. 1.0 + + - Eclipse Public License v. 1.0 + + - Cecill v. 2.0 + +Platform: Linux +Classifier: Programming Language :: Python :: 3 +Classifier: License :: European Union Public Licence :: 1.1 +Classifier: Operating System :: OS Independent +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE.md + +# SimpleImageClassifier + +SimpleImageClassifier is a simple example that allows you to clissify a image jpg in input. +Starting from this example, you can first create an installable package via pip3 and then a docker image in which it is installed the created package. +The package declares the simpleimageclassifier command as entrypoint. 
+So once the package is installed you can use this command at command line to run the example: + +``` +simpleimageclassifier --config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input --output --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl + + +``` + +The image created in this way can be executed in a container with the following command: + +``` +docker run -i -t --rm --name simpleimageclassifier-cont simpleimageclassifier simpleimageclassifier --config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml --input --output --opts MODEL.DEVICE cpu MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl +``` + +To be able to create an image from this application you need to have Docker and Docker-Compose installed on your machine and the relative python packages, see: +[Docker](https://docs.docker.com/engine/), +[Docker-Compose](https://docs.docker.com/compose/install/) and +[Docker Package for Python](https://pypi.org/project/docker/). + +## Useful Commands + +### Create Distribution Package +``` +python3 setup.py sdist --formats=gztar +``` +### Create Docker Image +``` +docker build -t simpleimageclassifier . 
+``` + +### Save Docker Image in file +``` +docker save simpleimageclassifier | gzip > simpleimageclassifier.tar.gz +``` + +### Publish Docker Image on DockerHub +Re-tagging an existing local image: + +``` +docker tag simpleimageclassifier /[:] +``` + +Login in DockerHub(use your Docker ID): + +``` +docker login +``` + +Now you can push this repository to the registry designated by its name or tag: + +``` +docker push /: +``` + +Then logout for security: + +``` +docker logout +``` + + +## Authors + +* **Giancarlo Panichi** ([ORCID](http://orcid.org/0000-0001-8375-6644)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience) + + + +## License + +This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details. + + +## About the gCube Framework +This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an +open-source software toolkit used for building and operating Hybrid Data +Infrastructures enabling the dynamic deployment of Virtual Research Environments +by favouring the realisation of reuse oriented policies. 
+ +The projects leading to this software have received funding from a series of European Union programmes see [FUNDING.md](FUNDING.md) + + + diff --git a/src/simpleimageclassifier.egg-info/SOURCES.txt b/src/simpleimageclassifier.egg-info/SOURCES.txt new file mode 100644 index 0000000..2eb0526 --- /dev/null +++ b/src/simpleimageclassifier.egg-info/SOURCES.txt @@ -0,0 +1,146 @@ +LICENSE.md +MANIFEST.in +README.md +setup.py +src/simpleimageclassifier/__init__.py +src/simpleimageclassifier/__main__.py +src/simpleimageclassifier/predictor.py +src/simpleimageclassifier/simpleimageclassifier.py +src/simpleimageclassifier.egg-info/PKG-INFO +src/simpleimageclassifier.egg-info/SOURCES.txt +src/simpleimageclassifier.egg-info/dependency_links.txt +src/simpleimageclassifier.egg-info/entry_points.txt +src/simpleimageclassifier.egg-info/top_level.txt +src/simpleimageclassifier/configs/Base-RCNN-C4.yaml +src/simpleimageclassifier/configs/Base-RCNN-DilatedC5.yaml +src/simpleimageclassifier/configs/Base-RCNN-FPN.yaml +src/simpleimageclassifier/configs/Base-RetinaNet.yaml +src/simpleimageclassifier/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml 
+src/simpleimageclassifier/configs/COCO-Detection/fcos_R_50_FPN_1x.py +src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.py +src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_C4_1x.yaml +src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.py +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.py +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x_giou.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py +src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py +src/simpleimageclassifier/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml 
+src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.py +src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml +src/simpleimageclassifier/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml +src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml +src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.py +src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml +src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml +src/simpleimageclassifier/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml +src/simpleimageclassifier/configs/Detectron1-Comparisons/README.md +src/simpleimageclassifier/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml +src/simpleimageclassifier/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml +src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml +src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml +src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml +src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml +src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml 
+src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml +src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml +src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml +src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml +src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml +src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml +src/simpleimageclassifier/configs/Misc/mmdet_mask_rcnn_R_50_FPN_1x.py +src/simpleimageclassifier/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml +src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml +src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml +src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml +src/simpleimageclassifier/configs/Misc/semantic_R_50_FPN_1x.yaml +src/simpleimageclassifier/configs/Misc/torchvision_imagenet_R_50.py +src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml +src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml +src/simpleimageclassifier/configs/common/README.md +src/simpleimageclassifier/configs/common/coco_schedule.py +src/simpleimageclassifier/configs/common/optim.py +src/simpleimageclassifier/configs/common/train.py +src/simpleimageclassifier/configs/common/data/coco.py +src/simpleimageclassifier/configs/common/data/coco_keypoint.py +src/simpleimageclassifier/configs/common/data/coco_panoptic_separated.py +src/simpleimageclassifier/configs/common/data/constants.py +src/simpleimageclassifier/configs/common/models/cascade_rcnn.py +src/simpleimageclassifier/configs/common/models/fcos.py +src/simpleimageclassifier/configs/common/models/keypoint_rcnn_fpn.py +src/simpleimageclassifier/configs/common/models/mask_rcnn_c4.py +src/simpleimageclassifier/configs/common/models/mask_rcnn_fpn.py 
+src/simpleimageclassifier/configs/common/models/mask_rcnn_vitdet.py +src/simpleimageclassifier/configs/common/models/panoptic_fpn.py +src/simpleimageclassifier/configs/common/models/retinanet.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_50ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py +src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py +src/simpleimageclassifier/configs/quick_schedules/README.md +src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml 
+src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_pred_boxes_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml +src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml +src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml \ No newline at end of file diff --git 
a/src/simpleimageclassifier.egg-info/dependency_links.txt b/src/simpleimageclassifier.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/simpleimageclassifier.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/src/simpleimageclassifier.egg-info/entry_points.txt b/src/simpleimageclassifier.egg-info/entry_points.txt new file mode 100644 index 0000000..4126060 --- /dev/null +++ b/src/simpleimageclassifier.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +simpleimageclassifier = simpleimageclassifier.simpleimageclassifier:simpleimageclassifier + diff --git a/src/simpleimageclassifier.egg-info/top_level.txt b/src/simpleimageclassifier.egg-info/top_level.txt new file mode 100644 index 0000000..2ed84ce --- /dev/null +++ b/src/simpleimageclassifier.egg-info/top_level.txt @@ -0,0 +1 @@ +simpleimageclassifier diff --git a/src/simpleimageclassifier/__init__.py b/src/simpleimageclassifier/__init__.py new file mode 100644 index 0000000..f520d5e --- /dev/null +++ b/src/simpleimageclassifier/__init__.py @@ -0,0 +1,2 @@ +print("__init__.py") +print(__package__) diff --git a/src/simpleimageclassifier/__main__.py b/src/simpleimageclassifier/__main__.py new file mode 100644 index 0000000..fe20013 --- /dev/null +++ b/src/simpleimageclassifier/__main__.py @@ -0,0 +1,6 @@ +print(__name__) +print(__package__) +from .simpleimageclassifier import simpleimageclassifier + +if __name__ == '__main__': + simpleimageclassifier() \ No newline at end of file diff --git a/src/simpleimageclassifier/__pycache__/__init__.cpython-38.pyc b/src/simpleimageclassifier/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5a7e511056e7e6c9f09caf3cfef2fb202801b66 GIT binary patch literal 233 zcmWIL<>g`k0*~OENg6=azF3wELOfAwcE=tBBgXDx; V95%W6DWy57c8nm0gM7fj2mn7BL3scG literal 0 HcmV?d00001 diff --git a/src/simpleimageclassifier/__pycache__/__main__.cpython-38.pyc 
b/src/simpleimageclassifier/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..677a5d91a7b7dcd99ea3f2ad721adac12e740edb GIT binary patch literal 307 zcmZ`!OG*SW5KSc`26dJmqFqcbAmYxQ2;#JgVwuxjYbzSNnGV%=+G%-;ms#bCnen34$F!Q`vM719%nGtzr)bIm&!O zYA1N2u0^-_01kCfkBbYFCl~CRdT&c{4~}xxh{3vuc-bxL-jrROn-^^$#CnK0*?!~* zVs2-=h`i78O`msWlLDEwv$40?C-ao@SU5lzZob;in-tBTTTRLRKa-zxR@1iX(~yQo QPiO6gqM0;kIFcH^08v_1)&Kwi literal 0 HcmV?d00001 diff --git a/src/simpleimageclassifier/__pycache__/predictor.cpython-38.pyc b/src/simpleimageclassifier/__pycache__/predictor.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..261e3431fef0180fcc4a855fb93b12812d5f280f GIT binary patch literal 7066 zcmai3&2t<_6`!83on5V@wd^>t}d#FaM z_v_d1{oZ@6_h)Bo8h(F^K40y9Nz?vKACo^5A7}8SKSRMa&LXWZ@p#Utw(0p zYFVu4vsn(NND=DUqngK3vEUg6F? zjXUDNw$WO^ca_)hUE9`M2RXa0HD?}yA8n<{a*K;weSymSWs&*Z&-`UPQPM$EB3kyIcw`&=f*wOvbf2udm8p{^9tVB2e0Do@EUQQ9~jN=XJHbb7v1DyKr$ZvEL7-w z8Be-~BGZN}(+6xF>ufW5tf+BzM2oTJ4jX8mmD$4zH&opj=xC`943xEjAzPWfJ~OQE zXj^7BJ75Eon-7R$F0N(s1GY`$=ePyl?9Kny-12u;cGAAx-Wj}UNcWH*m4P%#w?Y592 z&fK0%0+FU7b=xxOy0^=7Q{r2iVv*Qa-a05SfSB0BK$(`{G zl0n!l$MgDeKNUPTf?oe2%bmF2^*X(Ny3%Y&Qh?m(h%7fE5$DeNvp3FOzi{JvZr-?X z{YLI|O9jj=s9La=+g#iZ11J^d^2++`l(=$dB$nI^`rOa!1(_El{W#0#&Zc+cU{`zd zYHy^-l9Z%rde+{X0jS<=$l9PDgV-FV4%~W^5@`f<55QNCsa7wg3=DP0o!3)#(;7B`$Qj? 
zT>r#q8%MM)mN6)~whrobgBuTwEhzlJ*sx`LU~uz@_G5j^+Oh}c&>UDp3sg4D#@^Al z^!OcaDaxOtw*qSGh_>#K89!hnO02GPM^XJA6wVQC)U#C?nCsO+g;yUCuf{-c>oBUC ze;j(XtET(yN(tOujC)Hl_oejj!Z4toMRU@2(>1>bqk8Lv`@so!x#=##IL=+V<|dK@ zy2k2D%1^%=Yhmi*KUq^dkzGG?NuAx*yY7{qh?j3J)pohLCbGVa_ZWJ4t^lXhT_n&1 zLo`{^&wBmL1xvezI=wn9%`1r(an5g(CY57HMhHI2jj74T<5wO%#;?hg4(IirA1A#m z47^ly@=7YYR8a;Bi!^xH&8s6TN`dGz6FfI-@(2`0R;XxDLE4zVz@>l3o1E917?kU< z*Mme?S9Ilyx$VnN*Z(*_qFP5wPL6Ck@+gh7)V^A^u@K}}(5@OvKO2b9*j z4L4cY*0!t+R=&;7YagD6AZ%E-HH3BVFh(<4!&*@v07*71c@DiZaIqEDN6wbqaAn|Z z>q>Oz;Cd}M@6Y}Z-TnC3z`&Y^u^z^QLyOiaXZa&HfGDQFG>Ir95y3xBD72fzaF-B# zIx^TWChJwAo4~ePnh}FRF0>w}-mTZTSnBeS(p@{e%Xetr%v;sg_+E~-o zHiaLojV7bLkKtuXNhrc1B_VrHf1{LS2$zvTN#F+HY6cAM;xsuK74-D04(LC-Ls8833E8K~}~wL+~NjtY(iaUvW`Iff`H z(u3=%?hr3EY8jXmm#rw|D4^mv_*FnCUqlV@q=A#lo#1w+!mfOF`O3Rju6ghfr>b#l6lligN;}szg=7lBsLf=$21K(K*yd{^<=%&8YT2dNqa^U7 z^v$IwthCaclXTK!A0R1eSMibZ5cxq~9PuR44&npqgTNToB)qg0?w4DOKQ)o^ihHII1;3 z!_<#l&lx;vhGLXRan>@B=usMllo5%Zi7%B*aZ3Q1kqB1!v%=w(Uo(|dSuG@mdN)uwmfrlALa4JN=mZ>P zYlxWOGm8kS$VYq%!q;RHC;c?Kd&2D{2#;5zyM(fDzUp4O`hHqq%{dYHh_+lLE{M*; zxT9jFk##67b~Aqis2m`?0=!C}rt!NJ>?@=WyW)i6DWf=UO+-CJb6x27ZAxck((kMh zj{9yDW?3ZM)i9$xDxO$U9dyaiKFL#(a|TaJDm<1vB%*H~nNBvAyz5!gyOC^&c%^B` zix?m;QSlZPL0@0XAaf48PS!ur~q_T-J;!z{yI~XR(SP`IBJx@7G&l{tm8u1JC zE9=l;#`JIVm8b4#oE*`yjecaCw7q3KDV;^8A&o&MzoinNp@Hy2&&+|IkLSfomZJ_;ZCIW`f732- zkV5a=B&G>SDmmeuV)kp@zn41~853z%=04@uv$0BeCQjl~h1= zgE*KiPaqIcG&%*PrkBxMpzNGCr92PNk&;vqT-)*{DvwC!vW6lU=_=|fav*?H(*G+e zkUm1nZOAucNIMO#9|C+mAa&c{Vs>y=bYC+Xr9#ZN=wyZ!oEu_~p+hQ(In51Iu5k)G z%^ilj9ve45HE#xIXASGw>~Kycx=`hHa|d@nIK^{Fbv18Zl?}Dh0aY&07=)E|oV+zW zNb}$NU3O^r4B8LVNM3<`Ih%YpUfUlRKIQ&rbVEY9DP@ac+x;fO!7z%{y$(Qf`Re<- zQ_Bw^v_~XTWNsB?SIX~_us17o19JK&Tax1VmFBa#LCKfi?d8^cWQ$`f%Pn&G$S$NG zcSLRlQ4)*10*@|}G|TNG^UbZxSI%E}H@DAUxcT<-1^GQpTWBvGFx+_`;SaR5qT(P_ zW=dPftQ)%@N`)=xzQn|c2|f<9+>xRarkRkrldknMp4=gQq)f0doEe&bh6F=5EkdR( z3wKB%&slGe6)0f%Ey#wqeHTe6@XZPAhdG|6E z@FYDisHP-9yQer`G2x;@5&+g6QaS=1&kS<%3Lo(|i4jYd{1CKb1s^-LW|7+Dtn}VW 
zSW3}1FyfzhzL>)i7EEzCbw=dsM*fb5bri<=OY@)Y_sKEYj)bb)T=D{RFW1B0-R%#*!s!MFNl+Nl7UxIuHPgh(rJZAkvRq4sz#VQT8GFFLLl7;A^IQa8)j$t6b&s!BvqH%fHvN3&FBHQ`4WX zUw6Ov`n_MzM`L3p4bLyb|JwV_RZaVEs$6^wRQ>^<_zxsZV|t{yS+2XfsvEAUa?8!B z+;(l1=iR)@3vNN>MYpK(l3P-F*)1bCqDpJb9n*=Y8I89l+zC~-qH1f>okTgu>}aYr z?M|bdcV}3E6+hG5Syp0YnMb!ktkxEQsrYC--?E z_LGRYx%-=|YahDB?X}(YjpdECU3YS4WpjJY+r78FwsX(hUEA43@8-I9Z*6&X$E|EF zZ)|SuuCI7^w>I4T%I5v|);HGNtLqy(yUQCZYdhY`=EmLi)isodJY2iMgPU*vglOOZ!PHSZaoTT=#jDEFCBOP_+Ge@O~(_PULN#Z*gi>%nW8cM3#6qg63wu!ie54B>En7q=Vi3jsblnzl5KoSv7ezJ zwF8~$%y_EzHBv1znMJ0_@=q~+FOhOv_wgsAB_T7jtZ zuuUf09a~t)dKSoTy6+9imL7G&`k^Q4`vL1jfmmw!;?U#uIQF)^x85LKdtuxNxUwF< zUiV}m3nid^gN`iE=X>9#NT9hnR#c8Gn=O!F*g_?<|)`BotPBcG?%eh{_0g@uLV zV6X1C+>($9heh&E(4E`$4+AHR+Z`z!vTi3MT7>D?zB+V#3>u(q9EB`M78GrFdTThj zYE*MC*7S@tzmO4qso(6n%MKj~Cuum!#2gW{eeTPEWs^F}#>C${5Tx&jFm6VH(~Ocm z2fLF2j~&se?>oM5Zm^KU3nctx>Gj2fcJl^Sn2X7}vstmfq$9D9Zev-khghDEAvp>E zC~z_w=jfuV5sS`x!-*5=G{d7HzU3T;QRLvX9*@YOUO#p=+d;hY0KG9wju*R^hMH9e zeBu0nHz6)Ozxsan_WdvpTb-68>JYdxmM|6)=Pq#CJYZqw)M#ycftoLd0xfyc4tNDD zvW?l;7`#dM&i#aijgyhO(MBH8>5$MIxgVgT)$(Ju5QTBz-1uPaht7lL4?kMF0a1or z)J(-LV@&6;mP-o|AJicNcYK*QJFOs=TU6z4#b?YzuWCzkjVSRYxvcxk>l>?U+c0ul z4dlwFWC_Kxv_vjH>smlv+(Dw@q+7a?z5i$1Rh;`_{r_!SmHs%)i3SuSY71pP0}gfa z!lTDRfn1r1Py7VQ*Z$$(9`_At9GC;E2QSgnFKcH;-;}unyGLohr~O2e1!g_bhyEk? 
zW9?tHhOPozK^CcJReOTa@wqlAQ6Ny<%02T;=SL5j<&6<_wj(I49)QH<|t{d<~xVcUk%fG{MGZx=Sb?*yoM9~a&pQ-&_ zI_9vz_MMNxN05qa&Ct5H)Ls*uocfV>Q7DV=MoHa|#2pkyL+{oqX+Z|9whFORyC@~x z=O?S9qn{dyNG)=FsdbQqF#_As?X>)WJkvv3H&3T7R3LwnA19HdIqt{JAkFh23Vacy zb|{p~#Fpy`3+kENz2LD-P52Fd1%oofevwLQn{$6{H`^c)*N|wi?I~s51>GiV{)3&r zgeXU`jxV9VPDG=Jq)*{*1)ummG%&JcW>V~u#uIH|o@sqeS`_h~!Y-vX1UZO*((b`l zVdK;e+kT?M`U^b+Il-Rdsm}jXmZ$`K&mS86b6FmgW#wROFix$ka0O$}0GGTX)y3y> zf`Rs5bXh&9QVYJxipEM$jaBVb|KwWVl2bj4m09Jf(a*{0!8BHfvo)CM<<9hvjefpg z0EEi1vB50p#(VigoBt1+7|dcFc{Y9pYdq6qgH^M!J$p_&!`V`CdJ^kxtJPhidCzpz z@BVpR+>#U2r`AaOxJ)%gGuH#m!)Y(Z*Dvn*R2Tn@U8xa7+b{MCgDbrv&H3n=oF813 z*W~=c3uh$n`N4I04gD{k5hT`rgflZr+0>8Bh6#CPbPg+=>1-M@eWq)t=A+*|dUk5W zw=u(aP7VG5IRN)z0ki)Sv(M0aY4%~;Z_zfZ_Aozqi8%F3vO1_jjxYC0q>)walehcj zZ(UvadgdH5iZH!&mB+2H#bFYku$w<)?c53^TuaYVzNK-%C`+d=+`4R~fv9 zo!8EE*vr^?{dH9zANJk^?ZkQCE!9^Y$~)sUDHjMmJu$G3pR+mW_6qD^sz2SEV)KX8 z`YX9OSYlVtwQTZ_|4geUzpm;^m%T~oa7NYN=*?h{0#@-Eq^Pt(D+j+{ zgP*F$1#naPQ4!<%hMNOEI*xY{S#yXQb#nek1RD@HED8qQ@+d@$cO0IB!{CIIyD#n{ z^ea$CkZJ!lQFD#~mk7%t1$6=@{sp*o90HpHKZwN)a6SJD z&vV>yweG7ceoU*|Cpfm3fNqEv0kIg$836rG_r?&a5$#DJ0R-EK280|H^K-(~dVKrs zH@~zNTZczAgTF9KMnf5ONriaBevQeCzuP zO(hNy^EqMzg~&wuQSI&2XtmucZZsZ*!KDX)y`M0JPu!BiJOr5pr{I@bK<`Ov3dz+4 zhtq{7%_&8tCgOW)5zOT;qnUq~`bxp$HYCsi{7T1{1u_D7Yn&&5@bVFBrFJF}jyM7S zG6X`X@J{R{adxrhx3f6}g|AnrT-ZaTQ&7Sp^vhpGqg$p1_E4eNwEPf&Zu6nHv3!5c z9UE0Px9=~1=-Oe6K(JdROs9aln?LqL`9W~v+QZ94MTOfHzb!lb>*c6G&tIq6CUF^( zJULm8qHIdxmQ^>dH$dyD32>2`n8BS{S>E!twl}xDyW3ce_i%l6_ntfZjn;c>>+j#& zbt_*tZ|(fW(3Q=PHg;9GO$*09kD*lH1}dcm(T)HV9!q``%i}jFd5sbR37jmncA4jq zxpww@f)l86ExPY1MY(3!YSJZ&Oh752d;!c8^2`2W1kB6y(jCkMm77y? 
z0OAUn+;{E$AZ+eScT(Xz;o%bCi2&C{=9c0PemfBS6MTi1d5aRl*NQj9=rt7YayDc3 zSE7hHvQggz>9gN@zuyBt@iQcZVM=&yeaa}%i~2l$xU*B)!fP1zS2Kp8m;PXUWt9qg z6%_Nv9L7|Pas4$=5$z1#501ALWcCE%x=hcZ;Pp$m$l3r+?)3-7iP`bThw3{jA;C_o_Jc-8L|{8TY5 LvrsL(QmFn3#^R~X literal 0 HcmV?d00001 diff --git a/src/simpleimageclassifier/configs/Base-RCNN-C4.yaml b/src/simpleimageclassifier/configs/Base-RCNN-C4.yaml new file mode 100644 index 0000000..fbf34a0 --- /dev/null +++ b/src/simpleimageclassifier/configs/Base-RCNN-C4.yaml @@ -0,0 +1,18 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + RPN: + PRE_NMS_TOPK_TEST: 6000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "Res5ROIHeads" +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/src/simpleimageclassifier/configs/Base-RCNN-DilatedC5.yaml b/src/simpleimageclassifier/configs/Base-RCNN-DilatedC5.yaml new file mode 100644 index 0000000..c0d6d16 --- /dev/null +++ b/src/simpleimageclassifier/configs/Base-RCNN-DilatedC5.yaml @@ -0,0 +1,31 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + RESNETS: + OUT_FEATURES: ["res5"] + RES5_DILATION: 2 + RPN: + IN_FEATURES: ["res5"] + PRE_NMS_TOPK_TEST: 6000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["res5"] + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/src/simpleimageclassifier/configs/Base-RCNN-FPN.yaml b/src/simpleimageclassifier/configs/Base-RCNN-FPN.yaml new file mode 100644 index 0000000..3e020f2 --- /dev/null +++ 
b/src/simpleimageclassifier/configs/Base-RCNN-FPN.yaml @@ -0,0 +1,42 @@ +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for all in feature maps) + RPN: + IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See "modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. + POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + ROI_HEADS: + NAME: "StandardROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + ROI_MASK_HEAD: + NAME: "MaskRCNNConvUpsampleHead" + NUM_CONV: 4 + POOLER_RESOLUTION: 14 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.02 + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/src/simpleimageclassifier/configs/Base-RetinaNet.yaml b/src/simpleimageclassifier/configs/Base-RetinaNet.yaml new file mode 100644 index 0000000..8b45b98 --- /dev/null +++ b/src/simpleimageclassifier/configs/Base-RetinaNet.yaml @@ -0,0 +1,25 @@ +MODEL: + META_ARCHITECTURE: "RetinaNet" + BACKBONE: + NAME: "build_retinanet_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: !!python/object/apply:eval ["[[x, x * 2**(1.0/3), x * 2**(2.0/3) ] for x in [32, 64, 128, 256, 512 ]]"] + FPN: + IN_FEATURES: ["res3", "res4", "res5"] + RETINANET: + IOU_THRESHOLDS: [0.4, 0.5] + IOU_LABELS: [0, 
-1, 1] + SMOOTH_L1_LOSS_BETA: 0.0 +DATASETS: + TRAIN: ("coco_2017_train",) + TEST: ("coco_2017_val",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.01 # Note that RetinaNet uses a different default learning rate + STEPS: (60000, 80000) + MAX_ITER: 90000 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +VERSION: 2 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..773ac10 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,17 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + LOAD_PROPOSALS: True + RESNETS: + DEPTH: 50 + PROPOSAL_GENERATOR: + NAME: "PrecomputedProposals" +DATASETS: + TRAIN: ("coco_2017_train",) + PROPOSAL_FILES_TRAIN: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_train_box_proposals_21bc3a.pkl", ) + TEST: ("coco_2017_val",) + PROPOSAL_FILES_TEST: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) +DATALOADER: + # proposals are part of the dataset_dicts, and take a lot of RAM + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml new file mode 100644 index 0000000..db142cd --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml new file mode 100644 index 
0000000..bceb6b3 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 0000000..57a098f --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: False + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml new file mode 100644 index 0000000..f961301 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml new file mode 100644 index 0000000..bc51bce --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml 
b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml new file mode 100644 index 0000000..0fe96f5 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml new file mode 100644 index 0000000..33fadeb --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..3262019 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000..4139518 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git 
a/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml new file mode 100644 index 0000000..9c9b5ab --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,13 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: False + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/fcos_R_50_FPN_1x.py b/src/simpleimageclassifier/configs/COCO-Detection/fcos_R_50_FPN_1x.py new file mode 100644 index 0000000..86f83c6 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/fcos_R_50_FPN_1x.py @@ -0,0 +1,11 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.fcos import model +from ..common.train import train + +dataloader.train.mapper.use_instance_mask = False +optimizer.lr = 0.01 + +model.backbone.bottom_up.freeze_at = 2 +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml new file mode 100644 index 0000000..4abb1b9 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.py 
b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.py new file mode 100644 index 0000000..43057a8 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.py @@ -0,0 +1,11 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.retinanet import model +from ..common.train import train + +dataloader.train.mapper.use_instance_mask = False +model.backbone.bottom_up.freeze_at = 2 +optimizer.lr = 0.01 + +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml new file mode 100644 index 0000000..4a24ce3 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml new file mode 100644 index 0000000..3b5412d --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RetinaNet.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_C4_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_C4_1x.yaml new file mode 100644 index 0000000..e048211 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_C4_1x.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + META_ARCHITECTURE: "ProposalNetwork" + WEIGHTS: 
"detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + RPN: + PRE_NMS_TOPK_TEST: 12000 + POST_NMS_TOPK_TEST: 2000 diff --git a/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..dc9c952 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Detection/rpn_R_50_FPN_1x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "ProposalNetwork" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + RPN: + POST_NMS_TOPK_TEST: 2000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml new file mode 100644 index 0000000..1a94cc4 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml new file mode 100644 index 0000000..67b70cf --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 
0000000..1935a30 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.py b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.py new file mode 100644 index 0000000..22016be --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.py @@ -0,0 +1,8 @@ +from ..common.train import train +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.mask_rcnn_c4 import model + +model.backbone.freeze_at = 2 +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml new file mode 100644 index 0000000..a9aeb4e --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml new file mode 100644 index 0000000..38ed867 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + 
DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml new file mode 100644 index 0000000..b13eefa --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml new file mode 100644 index 0000000..d401016 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-DilatedC5.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.py b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.py new file mode 100644 index 0000000..40844dd --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.py @@ -0,0 +1,8 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.mask_rcnn_fpn import model +from ..common.train import train + +model.backbone.bottom_up.freeze_at = 2 +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml 
b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..d50fb86 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,6 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x_giou.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x_giou.yaml new file mode 100644 index 0000000..bec680e --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x_giou.yaml @@ -0,0 +1,12 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + RPN: + BBOX_REG_LOSS_TYPE: "giou" + BBOX_REG_LOSS_WEIGHT: 2.0 + ROI_BOX_HEAD: + BBOX_REG_LOSS_TYPE: "giou" + BBOX_REG_LOSS_WEIGHT: 10.0 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000..be7d06b --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,9 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml new file mode 100644 index 0000000..d14c63f --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,13 @@ +_BASE_: 
"../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py new file mode 100644 index 0000000..d7bbdd7 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py @@ -0,0 +1,34 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.mask_rcnn_fpn import model +from ..common.train import train + +from detectron2.config import LazyCall as L +from detectron2.modeling.backbone import RegNet +from detectron2.modeling.backbone.regnet import SimpleStem, ResBottleneckBlock + + +# Replace default ResNet with RegNetX-4GF from the DDS paper. 
Config source: +# https://github.com/facebookresearch/pycls/blob/2c152a6e5d913e898cca4f0a758f41e6b976714d/configs/dds_baselines/regnetx/RegNetX-4.0GF_dds_8gpu.yaml#L4-L9 # noqa +model.backbone.bottom_up = L(RegNet)( + stem_class=SimpleStem, + stem_width=32, + block_class=ResBottleneckBlock, + depth=23, + w_a=38.65, + w_0=96, + w_m=2.43, + group_width=40, + freeze_at=2, + norm="FrozenBN", + out_features=["s1", "s2", "s3", "s4"], +) +model.pixel_std = [57.375, 57.120, 58.395] + +optimizer.weight_decay = 5e-5 +train.init_checkpoint = ( + "https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906383/RegNetX-4.0GF_dds_8gpu.pyth" +) +# RegNets benefit from enabling cudnn benchmark mode +train.cudnn_benchmark = True diff --git a/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py new file mode 100644 index 0000000..72c6b7a --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py @@ -0,0 +1,35 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco import dataloader +from ..common.models.mask_rcnn_fpn import model +from ..common.train import train + +from detectron2.config import LazyCall as L +from detectron2.modeling.backbone import RegNet +from detectron2.modeling.backbone.regnet import SimpleStem, ResBottleneckBlock + + +# Replace default ResNet with RegNetY-4GF from the DDS paper. 
Config source: +# https://github.com/facebookresearch/pycls/blob/2c152a6e5d913e898cca4f0a758f41e6b976714d/configs/dds_baselines/regnety/RegNetY-4.0GF_dds_8gpu.yaml#L4-L10 # noqa +model.backbone.bottom_up = L(RegNet)( + stem_class=SimpleStem, + stem_width=32, + block_class=ResBottleneckBlock, + depth=22, + w_a=31.41, + w_0=96, + w_m=2.24, + group_width=64, + se_ratio=0.25, + freeze_at=2, + norm="FrozenBN", + out_features=["s1", "s2", "s3", "s4"], +) +model.pixel_std = [57.375, 57.120, 58.395] + +optimizer.weight_decay = 5e-5 +train.init_checkpoint = ( + "https://dl.fbaipublicfiles.com/pycls/dds_baselines/160906838/RegNetY-4.0GF_dds_8gpu.pyth" +) +# RegNets benefit from enabling cudnn benchmark mode +train.cudnn_benchmark = True diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml b/src/simpleimageclassifier/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml new file mode 100644 index 0000000..4e03944 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml @@ -0,0 +1,15 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + KEYPOINT_ON: True + ROI_HEADS: + NUM_CLASSES: 1 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 0.5 # Keypoint AP degrades (though box AP improves) when using plain L1 loss + RPN: + # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2. + # 1000 proposals per-image is found to hurt box AP. + # Therefore we increase it to 1500 per-image. 
+ POST_NMS_TOPK_TRAIN: 1500 +DATASETS: + TRAIN: ("keypoints_coco_2017_train",) + TEST: ("keypoints_coco_2017_val",) diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml new file mode 100644 index 0000000..9309535 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.py b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.py new file mode 100644 index 0000000..1aad53b --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.py @@ -0,0 +1,8 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco_keypoint import dataloader +from ..common.models.keypoint_rcnn_fpn import model +from ..common.train import train + +model.backbone.bottom_up.freeze_at = 2 +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..7bf85cf --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml new file mode 100644 
index 0000000..a07f243 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml new file mode 100644 index 0000000..d4bfa20 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml @@ -0,0 +1,12 @@ +_BASE_: "Base-Keypoint-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml new file mode 100644 index 0000000..f00d54b --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + MASK_ON: True + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_train_panoptic_separated",) + TEST: ("coco_2017_val_panoptic_separated",) +DATALOADER: + FILTER_EMPTY_ANNOTATIONS: False diff --git a/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml new file mode 100644 index 0000000..0e01f6f --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: 
"Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + RESNETS: + DEPTH: 101 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.py b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.py new file mode 100644 index 0000000..40cf181 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.py @@ -0,0 +1,8 @@ +from ..common.optim import SGD as optimizer +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.data.coco_panoptic_separated import dataloader +from ..common.models.panoptic_fpn import model +from ..common.train import train + +model.backbone.bottom_up.freeze_at = 2 +train.init_checkpoint = "detectron2://ImageNetPretrained/MSRA/R-50.pkl" diff --git a/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml new file mode 100644 index 0000000..6afa2c1 --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml @@ -0,0 +1,5 @@ +_BASE_: "Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 diff --git a/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml new file mode 100644 index 0000000..b956b3f --- /dev/null +++ b/src/simpleimageclassifier/configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml 
b/src/simpleimageclassifier/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml new file mode 100644 index 0000000..1a7aaeb --- /dev/null +++ b/src/simpleimageclassifier/configs/Cityscapes/mask_rcnn_R_50_FPN.yaml @@ -0,0 +1,27 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + # WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + # For better, more stable performance initialize from COCO + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl" + MASK_ON: True + ROI_HEADS: + NUM_CLASSES: 8 +# This is similar to the setting used in Mask R-CNN paper, Appendix A +# But there are some differences, e.g., we did not initialize the output +# layer using the corresponding classes from COCO +INPUT: + MIN_SIZE_TRAIN: (800, 832, 864, 896, 928, 960, 992, 1024) + MIN_SIZE_TRAIN_SAMPLING: "choice" + MIN_SIZE_TEST: 1024 + MAX_SIZE_TRAIN: 2048 + MAX_SIZE_TEST: 2048 +DATASETS: + TRAIN: ("cityscapes_fine_instance_seg_train",) + TEST: ("cityscapes_fine_instance_seg_val",) +SOLVER: + BASE_LR: 0.01 + STEPS: (18000,) + MAX_ITER: 24000 + IMS_PER_BATCH: 8 +TEST: + EVAL_PERIOD: 8000 diff --git a/src/simpleimageclassifier/configs/Detectron1-Comparisons/README.md b/src/simpleimageclassifier/configs/Detectron1-Comparisons/README.md new file mode 100644 index 0000000..924fd00 --- /dev/null +++ b/src/simpleimageclassifier/configs/Detectron1-Comparisons/README.md @@ -0,0 +1,84 @@ + +Detectron2 model zoo's experimental settings and a few implementation details are different from Detectron. + +The differences in implementation details are shared in +[Compatibility with Other Libraries](../../docs/notes/compatibility.md). + +The differences in model zoo's experimental settings include: +* Use scale augmentation during training. This improves AP with lower training cost. +* Use L1 loss instead of smooth L1 loss for simplicity. This sometimes improves box AP but may + affect other AP. +* Use `POOLER_SAMPLING_RATIO=0` instead of 2. 
This does not significantly affect AP. +* Use `ROIAlignV2`. This does not significantly affect AP. + +In this directory, we provide a few configs that __do not__ have the above changes. +They mimic Detectron's behavior as close as possible, +and provide a fair comparison of accuracy and speed against Detectron. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Namelr
sched
train
time
(s/iter)
inference
time
(s/im)
train
mem
(GB)
box
AP
mask
AP
kp.
AP
model iddownload
Faster R-CNN1x0.2190.0383.136.9137781054model | metrics
Keypoint R-CNN1x0.3130.0715.053.164.2137781195model | metrics
Mask R-CNN1x0.2730.0433.437.834.9137781281model | metrics
+ +## Comparisons: + +* Faster R-CNN: Detectron's AP is 36.7, similar to ours. +* Keypoint R-CNN: Detectron's AP is box 53.6, keypoint 64.2. Fixing a Detectron's + [bug](https://github.com/facebookresearch/Detectron/issues/459) lead to a drop in box AP, and can be + compensated back by some parameter tuning. +* Mask R-CNN: Detectron's AP is box 37.7, mask 33.9. We're 1 AP better in mask AP, due to more correct implementation. + See [this article](https://ppwwyyxx.com/blog/2021/Where-are-Pixels/) for details. + +For speed comparison, see [benchmarks](https://detectron2.readthedocs.io/notes/benchmarks.html). diff --git a/src/simpleimageclassifier/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml b/src/simpleimageclassifier/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml new file mode 100644 index 0000000..6ce77f1 --- /dev/null +++ b/src/simpleimageclassifier/configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml @@ -0,0 +1,17 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. 
+ RPN: + SMOOTH_L1_BETA: 0.1111 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" +INPUT: + # no scale augmentation + MIN_SIZE_TRAIN: (800, ) diff --git a/src/simpleimageclassifier/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..aacf868 --- /dev/null +++ b/src/simpleimageclassifier/configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,27 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + RPN: + SMOOTH_L1_BETA: 0.1111 + # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2 + # 1000 proposals per-image is found to hurt box AP. + # Therefore we increase it to 1500 per-image. + POST_NMS_TOPK_TRAIN: 1500 +DATASETS: + TRAIN: ("keypoints_coco_2017_train",) + TEST: ("keypoints_coco_2017_val",) diff --git a/src/simpleimageclassifier/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml b/src/simpleimageclassifier/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml new file mode 100644 index 0000000..4ea86a8 --- /dev/null +++ b/src/simpleimageclassifier/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml @@ -0,0 +1,20 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + # Detectron1 uses smooth L1 loss with some magic beta values. + # The defaults are changed to L1 loss in Detectron2. 
+ RPN: + SMOOTH_L1_BETA: 0.1111 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + ROI_MASK_HEAD: + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" +INPUT: + # no scale augmentation + MIN_SIZE_TRAIN: (800, ) diff --git a/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml new file mode 100644 index 0000000..f0c3a1b --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..64b4caa --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git 
a/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml new file mode 100644 index 0000000..c8b822c --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml @@ -0,0 +1,23 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1230 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v0.5_train",) + TEST: ("lvis_v0.5_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml new file mode 100644 index 0000000..ca4dd97 --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml @@ -0,0 +1,22 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-101.pkl" + MASK_ON: True + RESNETS: + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1203 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v1_train",) + TEST: ("lvis_v1_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +SOLVER: + STEPS: (120000, 160000) + MAX_ITER: 180000 # 180000 * 16 / 100000 ~ 28.8 epochs +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git 
a/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..f313295 --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,22 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 1203 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v1_train",) + TEST: ("lvis_v1_val",) +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +SOLVER: + STEPS: (120000, 160000) + MAX_ITER: 180000 # 180000 * 16 / 100000 ~ 28.8 epochs +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git a/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml new file mode 100644 index 0000000..f6528f7 --- /dev/null +++ b/src/simpleimageclassifier/configs/LVISv1-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml @@ -0,0 +1,26 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl" + PIXEL_STD: [57.375, 57.120, 58.395] + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 101 + ROI_HEADS: + NUM_CLASSES: 1203 + SCORE_THRESH_TEST: 0.0001 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +DATASETS: + TRAIN: ("lvis_v1_train",) + TEST: ("lvis_v1_val",) +SOLVER: + STEPS: (120000, 160000) + MAX_ITER: 180000 # 180000 * 16 / 100000 ~ 28.8 epochs +TEST: + DETECTIONS_PER_IMAGE: 300 # LVIS allows up to 300 +DATALOADER: + SAMPLER_TRAIN: "RepeatFactorTrainingSampler" + REPEAT_THRESHOLD: 0.001 diff --git 
a/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml new file mode 100644 index 0000000..abb33b6 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml @@ -0,0 +1,12 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + RPN: + POST_NMS_TOPK_TRAIN: 2000 diff --git a/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml new file mode 100644 index 0000000..e2201ad --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml @@ -0,0 +1,15 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml new file mode 100644 index 0000000..fc117f6 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml @@ -0,0 +1,36 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + MASK_ON: True + WEIGHTS: "catalog://ImageNetPretrained/FAIR/X-152-32x8d-IN5k" + RESNETS: + STRIDE_IN_1X1: False # this is a C2 model + NUM_GROUPS: 32 + WIDTH_PER_GROUP: 8 + DEPTH: 152 + DEFORM_ON_PER_STAGE: [False, True, True, True] + ROI_HEADS: + NAME: "CascadeROIHeads" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "GN" + CLS_AGNOSTIC_BBOX_REG: True + 
ROI_MASK_HEAD: + NUM_CONV: 8 + NORM: "GN" + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + IMS_PER_BATCH: 128 + STEPS: (35000, 45000) + MAX_ITER: 50000 + BASE_LR: 0.16 +INPUT: + MIN_SIZE_TRAIN: (640, 864) + MIN_SIZE_TRAIN_SAMPLING: "range" + MAX_SIZE_TRAIN: 1440 + CROP: + ENABLED: True +TEST: + EVAL_PERIOD: 2500 diff --git a/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml new file mode 100644 index 0000000..4c3b767 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + CLS_AGNOSTIC_MASK: True diff --git a/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml new file mode 100644 index 0000000..04ff988 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml @@ -0,0 +1,8 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5 + DEFORM_MODULATED: False diff --git a/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml new file mode 100644 index 0000000..68c0ca5 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5 + DEFORM_MODULATED: False 
+SOLVER: + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml new file mode 100644 index 0000000..74d274e --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml @@ -0,0 +1,21 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "catalog://ImageNetPretrained/FAIR/R-50-GN" + MASK_ON: True + RESNETS: + DEPTH: 50 + NORM: "GN" + STRIDE_IN_1X1: False + FPN: + NORM: "GN" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "GN" + ROI_MASK_HEAD: + NORM: "GN" +SOLVER: + # 3x schedule + STEPS: (210000, 250000) + MAX_ITER: 270000 diff --git a/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml new file mode 100644 index 0000000..11ebb07 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml @@ -0,0 +1,24 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + NORM: "SyncBN" + STRIDE_IN_1X1: True + FPN: + NORM: "SyncBN" + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_CONV: 4 + NUM_FC: 1 + NORM: "SyncBN" + ROI_MASK_HEAD: + NORM: "SyncBN" +SOLVER: + # 3x schedule + STEPS: (210000, 250000) + MAX_ITER: 270000 +TEST: + PRECISE_BN: + ENABLED: True diff --git a/src/simpleimageclassifier/configs/Misc/mmdet_mask_rcnn_R_50_FPN_1x.py b/src/simpleimageclassifier/configs/Misc/mmdet_mask_rcnn_R_50_FPN_1x.py new file mode 100644 index 0000000..bdd49a4 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/mmdet_mask_rcnn_R_50_FPN_1x.py @@ -0,0 +1,152 @@ +# An example config to train a mmdetection model using detectron2. 
+ +from ..common.data.coco import dataloader +from ..common.coco_schedule import lr_multiplier_1x as lr_multiplier +from ..common.optim import SGD as optimizer +from ..common.train import train +from ..common.data.constants import constants + +from detectron2.modeling.mmdet_wrapper import MMDetDetector +from detectron2.config import LazyCall as L + +model = L(MMDetDetector)( + detector=dict( + type="MaskRCNN", + pretrained="torchvision://resnet50", + backbone=dict( + type="ResNet", + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type="BN", requires_grad=True), + norm_eval=True, + style="pytorch", + ), + neck=dict(type="FPN", in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5), + rpn_head=dict( + type="RPNHead", + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type="AnchorGenerator", + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64], + ), + bbox_coder=dict( + type="DeltaXYWHBBoxCoder", + target_means=[0.0, 0.0, 0.0, 0.0], + target_stds=[1.0, 1.0, 1.0, 1.0], + ), + loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type="L1Loss", loss_weight=1.0), + ), + roi_head=dict( + type="StandardRoIHead", + bbox_roi_extractor=dict( + type="SingleRoIExtractor", + roi_layer=dict(type="RoIAlign", output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + ), + bbox_head=dict( + type="Shared2FCBBoxHead", + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type="DeltaXYWHBBoxCoder", + target_means=[0.0, 0.0, 0.0, 0.0], + target_stds=[0.1, 0.1, 0.2, 0.2], + ), + reg_class_agnostic=False, + loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type="L1Loss", loss_weight=1.0), + ), + mask_roi_extractor=dict( + type="SingleRoIExtractor", + roi_layer=dict(type="RoIAlign", output_size=14, sampling_ratio=0), + out_channels=256, + 
featmap_strides=[4, 8, 16, 32], + ), + mask_head=dict( + type="FCNMaskHead", + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict(type="CrossEntropyLoss", use_mask=True, loss_weight=1.0), + ), + ), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type="MaxIoUAssigner", + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1, + ), + sampler=dict( + type="RandomSampler", + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False, + ), + allowed_border=-1, + pos_weight=-1, + debug=False, + ), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type="nms", iou_threshold=0.7), + min_bbox_size=0, + ), + rcnn=dict( + assigner=dict( + type="MaxIoUAssigner", + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1, + ), + sampler=dict( + type="RandomSampler", + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + ), + mask_size=28, + pos_weight=-1, + debug=False, + ), + ), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type="nms", iou_threshold=0.7), + min_bbox_size=0, + ), + rcnn=dict( + score_thr=0.05, + nms=dict(type="nms", iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5, + ), + ), + ), + pixel_mean=constants.imagenet_rgb256_mean, + pixel_std=constants.imagenet_rgb256_std, +) + +dataloader.train.mapper.image_format = "RGB" # torchvision pretrained model +train.init_checkpoint = None # pretrained model is loaded inside backbone diff --git a/src/simpleimageclassifier/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml b/src/simpleimageclassifier/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml new file mode 100644 index 0000000..34016ce --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml @@ -0,0 +1,26 @@ +# A large PanopticFPN for demo 
purposes. +# Use GN on backbone to support semantic seg. +# Use Cascade + Deform Conv to improve localization. +_BASE_: "../COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml" +MODEL: + WEIGHTS: "catalog://ImageNetPretrained/FAIR/R-101-GN" + RESNETS: + DEPTH: 101 + NORM: "GN" + DEFORM_ON_PER_STAGE: [False, True, True, True] + STRIDE_IN_1X1: False + FPN: + NORM: "GN" + ROI_HEADS: + NAME: CascadeROIHeads + ROI_BOX_HEAD: + CLS_AGNOSTIC_BBOX_REG: True + ROI_MASK_HEAD: + NORM: "GN" + RPN: + POST_NMS_TOPK_TRAIN: 2000 +SOLVER: + STEPS: (105000, 125000) + MAX_ITER: 135000 + IMS_PER_BATCH: 32 + BASE_LR: 0.04 diff --git a/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml new file mode 100644 index 0000000..f340028 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml @@ -0,0 +1,13 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_gn.yaml" +MODEL: + # Train from random initialization. + WEIGHTS: "" + # It makes sense to divide by STD when training from scratch + # But it seems to make no difference on the results and C2's models didn't do this. + # So we keep things consistent with C2. + # PIXEL_STD: [57.375, 57.12, 58.395] + MASK_ON: True + BACKBONE: + FREEZE_AT: 0 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. 
diff --git a/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml new file mode 100644 index 0000000..d90c9ff --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_gn.yaml @@ -0,0 +1,19 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_gn.yaml" +MODEL: + PIXEL_STD: [57.375, 57.12, 58.395] + WEIGHTS: "" + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False + BACKBONE: + FREEZE_AT: 0 +SOLVER: + # 9x schedule + IMS_PER_BATCH: 64 # 4x the standard + STEPS: (187500, 197500) # last 60/4==15k and last 20/4==5k + MAX_ITER: 202500 # 90k * 9 / 4 + BASE_LR: 0.08 +TEST: + EVAL_PERIOD: 2500 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. diff --git a/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml new file mode 100644 index 0000000..60d4e42 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn.yaml @@ -0,0 +1,19 @@ +_BASE_: "mask_rcnn_R_50_FPN_3x_syncbn.yaml" +MODEL: + PIXEL_STD: [57.375, 57.12, 58.395] + WEIGHTS: "" + MASK_ON: True + RESNETS: + STRIDE_IN_1X1: False + BACKBONE: + FREEZE_AT: 0 +SOLVER: + # 9x schedule + IMS_PER_BATCH: 64 # 4x the standard + STEPS: (187500, 197500) # last 60/4==15k and last 20/4==5k + MAX_ITER: 202500 # 90k * 9 / 4 + BASE_LR: 0.08 +TEST: + EVAL_PERIOD: 2500 +# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883 +# to learn what you need for training from scratch. 
diff --git a/src/simpleimageclassifier/configs/Misc/semantic_R_50_FPN_1x.yaml b/src/simpleimageclassifier/configs/Misc/semantic_R_50_FPN_1x.yaml new file mode 100644 index 0000000..ac256e1 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/semantic_R_50_FPN_1x.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TRAIN: ("coco_2017_train_panoptic_stuffonly",) + TEST: ("coco_2017_val_panoptic_stuffonly",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) diff --git a/src/simpleimageclassifier/configs/Misc/torchvision_imagenet_R_50.py b/src/simpleimageclassifier/configs/Misc/torchvision_imagenet_R_50.py new file mode 100644 index 0000000..0d75305 --- /dev/null +++ b/src/simpleimageclassifier/configs/Misc/torchvision_imagenet_R_50.py @@ -0,0 +1,150 @@ +""" +An example config file to train a ImageNet classifier with detectron2. +Model and dataloader both come from torchvision. +This shows how to use detectron2 as a general engine for any new models and tasks. 
+ +To run, use the following command: + +python tools/lazyconfig_train_net.py --config-file configs/Misc/torchvision_imagenet_R_50.py \ + --num-gpus 8 dataloader.train.dataset.root=/path/to/imagenet/ + +""" + + +import torch +from torch import nn +from torch.nn import functional as F +from omegaconf import OmegaConf +import torchvision +from torchvision.transforms import transforms as T +from torchvision.models.resnet import ResNet, Bottleneck +from fvcore.common.param_scheduler import MultiStepParamScheduler + +from detectron2.solver import WarmupParamScheduler +from detectron2.solver.build import get_default_optimizer_params +from detectron2.config import LazyCall as L +from detectron2.model_zoo import get_config +from detectron2.data.samplers import TrainingSampler, InferenceSampler +from detectron2.evaluation import DatasetEvaluator +from detectron2.utils import comm + + +""" +Note: Here we put reusable code (models, evaluation, data) together with configs just as a +proof-of-concept, to easily demonstrate what's needed to train a ImageNet classifier in detectron2. +Writing code in configs offers extreme flexibility but is often not a good engineering practice. +In practice, you might want to put code in your project and import them instead. 
+""" + + +def build_data_loader(dataset, batch_size, num_workers, training=True): + return torch.utils.data.DataLoader( + dataset, + sampler=(TrainingSampler if training else InferenceSampler)(len(dataset)), + batch_size=batch_size, + num_workers=num_workers, + pin_memory=True, + ) + + +class ClassificationNet(nn.Module): + def __init__(self, model: nn.Module): + super().__init__() + self.model = model + + @property + def device(self): + return list(self.model.parameters())[0].device + + def forward(self, inputs): + image, label = inputs + pred = self.model(image.to(self.device)) + if self.training: + label = label.to(self.device) + return F.cross_entropy(pred, label) + else: + return pred + + +class ClassificationAcc(DatasetEvaluator): + def reset(self): + self.corr = self.total = 0 + + def process(self, inputs, outputs): + image, label = inputs + self.corr += (outputs.argmax(dim=1).cpu() == label.cpu()).sum().item() + self.total += len(label) + + def evaluate(self): + all_corr_total = comm.all_gather([self.corr, self.total]) + corr = sum(x[0] for x in all_corr_total) + total = sum(x[1] for x in all_corr_total) + return {"accuracy": corr / total} + + +# --- End of code that could be in a project and be imported + + +dataloader = OmegaConf.create() +dataloader.train = L(build_data_loader)( + dataset=L(torchvision.datasets.ImageNet)( + root="/path/to/imagenet", + split="train", + transform=L(T.Compose)( + transforms=[ + L(T.RandomResizedCrop)(size=224), + L(T.RandomHorizontalFlip)(), + T.ToTensor(), + L(T.Normalize)(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)), + ] + ), + ), + batch_size=256 // 8, + num_workers=4, + training=True, +) + +dataloader.test = L(build_data_loader)( + dataset=L(torchvision.datasets.ImageNet)( + root="${...train.dataset.root}", + split="val", + transform=L(T.Compose)( + transforms=[ + L(T.Resize)(size=256), + L(T.CenterCrop)(size=224), + T.ToTensor(), + L(T.Normalize)(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)), + ] + 
), + ), + batch_size=256 // 8, + num_workers=4, + training=False, +) + +dataloader.evaluator = L(ClassificationAcc)() + +model = L(ClassificationNet)( + model=(ResNet)(block=Bottleneck, layers=[3, 4, 6, 3], zero_init_residual=True) +) + + +optimizer = L(torch.optim.SGD)( + params=L(get_default_optimizer_params)(), + lr=0.1, + momentum=0.9, + weight_decay=1e-4, +) + +lr_multiplier = L(WarmupParamScheduler)( + scheduler=L(MultiStepParamScheduler)( + values=[1.0, 0.1, 0.01, 0.001], milestones=[30, 60, 90, 100] + ), + warmup_length=1 / 100, + warmup_factor=0.1, +) + + +train = get_config("common/train.py").train +train.init_checkpoint = None +train.max_iter = 100 * 1281167 // 256 diff --git a/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml b/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml new file mode 100644 index 0000000..ea2a6ba --- /dev/null +++ b/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 20 +INPUT: + MIN_SIZE_TRAIN: (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) + MIN_SIZE_TEST: 800 +DATASETS: + TRAIN: ('voc_2007_trainval', 'voc_2012_trainval') + TEST: ('voc_2007_test',) +SOLVER: + STEPS: (12000, 16000) + MAX_ITER: 18000 # 17.4 epochs + WARMUP_ITERS: 100 diff --git a/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml b/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml new file mode 100644 index 0000000..e554cab --- /dev/null +++ b/src/simpleimageclassifier/configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: False + RESNETS: + DEPTH: 50 + ROI_HEADS: + NUM_CLASSES: 20 +INPUT: + MIN_SIZE_TRAIN: 
(480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) + MIN_SIZE_TEST: 800 +DATASETS: + TRAIN: ('voc_2007_trainval', 'voc_2012_trainval') + TEST: ('voc_2007_test',) +SOLVER: + STEPS: (12000, 16000) + MAX_ITER: 18000 # 17.4 epochs + WARMUP_ITERS: 100 diff --git a/src/simpleimageclassifier/configs/common/README.md b/src/simpleimageclassifier/configs/common/README.md new file mode 100644 index 0000000..912cc29 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/README.md @@ -0,0 +1,6 @@ +This directory provides definitions for a few common models, dataloaders, scheduler, +and optimizers that are often used in training. +The definition of these objects are provided in the form of lazy instantiation: +their arguments can be edited by users before constructing the objects. + +They can be imported, or loaded by `model_zoo.get_config` API in users' own configs. diff --git a/src/simpleimageclassifier/configs/common/coco_schedule.py b/src/simpleimageclassifier/configs/common/coco_schedule.py new file mode 100644 index 0000000..355e66a --- /dev/null +++ b/src/simpleimageclassifier/configs/common/coco_schedule.py @@ -0,0 +1,47 @@ +from fvcore.common.param_scheduler import MultiStepParamScheduler + +from detectron2.config import LazyCall as L +from detectron2.solver import WarmupParamScheduler + + +def default_X_scheduler(num_X): + """ + Returns the config for a default multi-step LR scheduler such as "1x", "3x", + commonly referred to in papers, where every 1x has the total length of 1440k + training images (~12 COCO epochs). LR is decayed twice at the end of training + following the strategy defined in "Rethinking ImageNet Pretraining", Sec 4. 
+ + Args: + num_X: a positive real number + + Returns: + DictConfig: configs that define the multiplier for LR during training + """ + # total number of iterations assuming 16 batch size, using 1440000/16=90000 + total_steps_16bs = num_X * 90000 + + if num_X <= 2: + scheduler = L(MultiStepParamScheduler)( + values=[1.0, 0.1, 0.01], + # note that scheduler is scale-invariant. This is equivalent to + # milestones=[6, 8, 9] + milestones=[60000, 80000, 90000], + ) + else: + scheduler = L(MultiStepParamScheduler)( + values=[1.0, 0.1, 0.01], + milestones=[total_steps_16bs - 60000, total_steps_16bs - 20000, total_steps_16bs], + ) + return L(WarmupParamScheduler)( + scheduler=scheduler, + warmup_length=1000 / total_steps_16bs, + warmup_method="linear", + warmup_factor=0.001, + ) + + +lr_multiplier_1x = default_X_scheduler(1) +lr_multiplier_2x = default_X_scheduler(2) +lr_multiplier_3x = default_X_scheduler(3) +lr_multiplier_6x = default_X_scheduler(6) +lr_multiplier_9x = default_X_scheduler(9) diff --git a/src/simpleimageclassifier/configs/common/data/coco.py b/src/simpleimageclassifier/configs/common/data/coco.py new file mode 100644 index 0000000..703c438 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/data/coco.py @@ -0,0 +1,48 @@ +from omegaconf import OmegaConf + +import detectron2.data.transforms as T +from detectron2.config import LazyCall as L +from detectron2.data import ( + DatasetMapper, + build_detection_test_loader, + build_detection_train_loader, + get_detection_dataset_dicts, +) +from detectron2.evaluation import COCOEvaluator + +dataloader = OmegaConf.create() + +dataloader.train = L(build_detection_train_loader)( + dataset=L(get_detection_dataset_dicts)(names="coco_2017_train"), + mapper=L(DatasetMapper)( + is_train=True, + augmentations=[ + L(T.ResizeShortestEdge)( + short_edge_length=(640, 672, 704, 736, 768, 800), + sample_style="choice", + max_size=1333, + ), + L(T.RandomFlip)(horizontal=True), + ], + image_format="BGR", + 
use_instance_mask=True, + ), + total_batch_size=16, + num_workers=4, +) + +dataloader.test = L(build_detection_test_loader)( + dataset=L(get_detection_dataset_dicts)(names="coco_2017_val", filter_empty=False), + mapper=L(DatasetMapper)( + is_train=False, + augmentations=[ + L(T.ResizeShortestEdge)(short_edge_length=800, max_size=1333), + ], + image_format="${...train.mapper.image_format}", + ), + num_workers=4, +) + +dataloader.evaluator = L(COCOEvaluator)( + dataset_name="${..test.dataset.names}", +) diff --git a/src/simpleimageclassifier/configs/common/data/coco_keypoint.py b/src/simpleimageclassifier/configs/common/data/coco_keypoint.py new file mode 100644 index 0000000..b4ceb06 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/data/coco_keypoint.py @@ -0,0 +1,13 @@ +from detectron2.data.detection_utils import create_keypoint_hflip_indices + +from .coco import dataloader + +dataloader.train.dataset.min_keypoints = 1 +dataloader.train.dataset.names = "keypoints_coco_2017_train" +dataloader.test.dataset.names = "keypoints_coco_2017_val" + +dataloader.train.mapper.update( + use_instance_mask=False, + use_keypoint=True, + keypoint_hflip_indices=create_keypoint_hflip_indices(dataloader.train.dataset.names), +) diff --git a/src/simpleimageclassifier/configs/common/data/coco_panoptic_separated.py b/src/simpleimageclassifier/configs/common/data/coco_panoptic_separated.py new file mode 100644 index 0000000..5ccbc77 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/data/coco_panoptic_separated.py @@ -0,0 +1,26 @@ +from detectron2.config import LazyCall as L +from detectron2.evaluation import ( + COCOEvaluator, + COCOPanopticEvaluator, + DatasetEvaluators, + SemSegEvaluator, +) + +from .coco import dataloader + +dataloader.train.dataset.names = "coco_2017_train_panoptic_separated" +dataloader.train.dataset.filter_empty = False +dataloader.test.dataset.names = "coco_2017_val_panoptic_separated" + + +dataloader.evaluator = [ + L(COCOEvaluator)( + 
dataset_name="${...test.dataset.names}", + ), + L(SemSegEvaluator)( + dataset_name="${...test.dataset.names}", + ), + L(COCOPanopticEvaluator)( + dataset_name="${...test.dataset.names}", + ), +] diff --git a/src/simpleimageclassifier/configs/common/data/constants.py b/src/simpleimageclassifier/configs/common/data/constants.py new file mode 100644 index 0000000..be11cb5 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/data/constants.py @@ -0,0 +1,9 @@ +constants = dict( + imagenet_rgb256_mean=[123.675, 116.28, 103.53], + imagenet_rgb256_std=[58.395, 57.12, 57.375], + imagenet_bgr256_mean=[103.530, 116.280, 123.675], + # When using pre-trained models in Detectron1 or any MSRA models, + # std has been absorbed into its conv1 weights, so the std needs to be set 1. + # Otherwise, you can use [57.375, 57.120, 58.395] (ImageNet std) + imagenet_bgr256_std=[1.0, 1.0, 1.0], +) diff --git a/src/simpleimageclassifier/configs/common/models/cascade_rcnn.py b/src/simpleimageclassifier/configs/common/models/cascade_rcnn.py new file mode 100644 index 0000000..c7372a8 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/cascade_rcnn.py @@ -0,0 +1,36 @@ +from detectron2.config import LazyCall as L +from detectron2.layers import ShapeSpec +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.matcher import Matcher +from detectron2.modeling.roi_heads import FastRCNNOutputLayers, FastRCNNConvFCHead, CascadeROIHeads + +from .mask_rcnn_fpn import model + +# arguments that don't exist for Cascade R-CNN +[model.roi_heads.pop(k) for k in ["box_head", "box_predictor", "proposal_matcher"]] + +model.roi_heads.update( + _target_=CascadeROIHeads, + box_heads=[ + L(FastRCNNConvFCHead)( + input_shape=ShapeSpec(channels=256, height=7, width=7), + conv_dims=[], + fc_dims=[1024, 1024], + ) + for k in range(3) + ], + box_predictors=[ + L(FastRCNNOutputLayers)( + input_shape=ShapeSpec(channels=1024), + test_score_thresh=0.05, + 
box2box_transform=L(Box2BoxTransform)(weights=(w1, w1, w2, w2)), + cls_agnostic_bbox_reg=True, + num_classes="${...num_classes}", + ) + for (w1, w2) in [(10, 5), (20, 10), (30, 15)] + ], + proposal_matchers=[ + L(Matcher)(thresholds=[th], labels=[0, 1], allow_low_quality_matches=False) + for th in [0.5, 0.6, 0.7] + ], +) diff --git a/src/simpleimageclassifier/configs/common/models/fcos.py b/src/simpleimageclassifier/configs/common/models/fcos.py new file mode 100644 index 0000000..1c75202 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/fcos.py @@ -0,0 +1,23 @@ +from detectron2.modeling.meta_arch.fcos import FCOS, FCOSHead + +from .retinanet import model + +model._target_ = FCOS + +del model.anchor_generator +del model.box2box_transform +del model.anchor_matcher +del model.input_format + +# Use P5 instead of C5 to compute P6/P7 +# (Sec 2.2 of https://arxiv.org/abs/2006.09214) +model.backbone.top_block.in_feature = "p5" +model.backbone.top_block.in_channels = 256 + +# New score threshold determined based on sqrt(cls_score * centerness) +model.test_score_thresh = 0.2 +model.test_nms_thresh = 0.6 + +model.head._target_ = FCOSHead +del model.head.num_anchors +model.head.norm = "GN" diff --git a/src/simpleimageclassifier/configs/common/models/keypoint_rcnn_fpn.py b/src/simpleimageclassifier/configs/common/models/keypoint_rcnn_fpn.py new file mode 100644 index 0000000..56b3994 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/keypoint_rcnn_fpn.py @@ -0,0 +1,33 @@ +from detectron2.config import LazyCall as L +from detectron2.layers import ShapeSpec +from detectron2.modeling.poolers import ROIPooler +from detectron2.modeling.roi_heads import KRCNNConvDeconvUpsampleHead + +from .mask_rcnn_fpn import model + +[model.roi_heads.pop(x) for x in ["mask_in_features", "mask_pooler", "mask_head"]] + +model.roi_heads.update( + num_classes=1, + keypoint_in_features=["p2", "p3", "p4", "p5"], + keypoint_pooler=L(ROIPooler)( + output_size=14, + 
scales=(1.0 / 4, 1.0 / 8, 1.0 / 16, 1.0 / 32), + sampling_ratio=0, + pooler_type="ROIAlignV2", + ), + keypoint_head=L(KRCNNConvDeconvUpsampleHead)( + input_shape=ShapeSpec(channels=256, width=14, height=14), + num_keypoints=17, + conv_dims=[512] * 8, + loss_normalizer="visible", + ), +) + +# Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2. +# 1000 proposals per-image is found to hurt box AP. +# Therefore we increase it to 1500 per-image. +model.proposal_generator.post_nms_topk = (1500, 1000) + +# Keypoint AP degrades (though box AP improves) when using plain L1 loss +model.roi_heads.box_predictor.smooth_l1_beta = 0.5 diff --git a/src/simpleimageclassifier/configs/common/models/mask_rcnn_c4.py b/src/simpleimageclassifier/configs/common/models/mask_rcnn_c4.py new file mode 100644 index 0000000..902d5b1 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/mask_rcnn_c4.py @@ -0,0 +1,90 @@ +from detectron2.config import LazyCall as L +from detectron2.layers import ShapeSpec +from detectron2.modeling.meta_arch import GeneralizedRCNN +from detectron2.modeling.anchor_generator import DefaultAnchorGenerator +from detectron2.modeling.backbone import BasicStem, BottleneckBlock, ResNet +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.matcher import Matcher +from detectron2.modeling.poolers import ROIPooler +from detectron2.modeling.proposal_generator import RPN, StandardRPNHead +from detectron2.modeling.roi_heads import ( + FastRCNNOutputLayers, + MaskRCNNConvUpsampleHead, + Res5ROIHeads, +) + +from ..data.constants import constants + +model = L(GeneralizedRCNN)( + backbone=L(ResNet)( + stem=L(BasicStem)(in_channels=3, out_channels=64, norm="FrozenBN"), + stages=L(ResNet.make_default_stages)( + depth=50, + stride_in_1x1=True, + norm="FrozenBN", + ), + out_features=["res4"], + ), + proposal_generator=L(RPN)( + in_features=["res4"], + head=L(StandardRPNHead)(in_channels=1024, 
num_anchors=15), + anchor_generator=L(DefaultAnchorGenerator)( + sizes=[[32, 64, 128, 256, 512]], + aspect_ratios=[0.5, 1.0, 2.0], + strides=[16], + offset=0.0, + ), + anchor_matcher=L(Matcher)( + thresholds=[0.3, 0.7], labels=[0, -1, 1], allow_low_quality_matches=True + ), + box2box_transform=L(Box2BoxTransform)(weights=[1.0, 1.0, 1.0, 1.0]), + batch_size_per_image=256, + positive_fraction=0.5, + pre_nms_topk=(12000, 6000), + post_nms_topk=(2000, 1000), + nms_thresh=0.7, + ), + roi_heads=L(Res5ROIHeads)( + num_classes=80, + batch_size_per_image=512, + positive_fraction=0.25, + proposal_matcher=L(Matcher)( + thresholds=[0.5], labels=[0, 1], allow_low_quality_matches=False + ), + in_features=["res4"], + pooler=L(ROIPooler)( + output_size=14, + scales=(1.0 / 16,), + sampling_ratio=0, + pooler_type="ROIAlignV2", + ), + res5=L(ResNet.make_stage)( + block_class=BottleneckBlock, + num_blocks=3, + stride_per_block=[2, 1, 1], + in_channels=1024, + bottleneck_channels=512, + out_channels=2048, + norm="FrozenBN", + stride_in_1x1=True, + ), + box_predictor=L(FastRCNNOutputLayers)( + input_shape=L(ShapeSpec)(channels="${...res5.out_channels}", height=1, width=1), + test_score_thresh=0.05, + box2box_transform=L(Box2BoxTransform)(weights=(10, 10, 5, 5)), + num_classes="${..num_classes}", + ), + mask_head=L(MaskRCNNConvUpsampleHead)( + input_shape=L(ShapeSpec)( + channels="${...res5.out_channels}", + width="${...pooler.output_size}", + height="${...pooler.output_size}", + ), + num_classes="${..num_classes}", + conv_dims=[256], + ), + ), + pixel_mean=constants.imagenet_bgr256_mean, + pixel_std=constants.imagenet_bgr256_std, + input_format="BGR", +) diff --git a/src/simpleimageclassifier/configs/common/models/mask_rcnn_fpn.py b/src/simpleimageclassifier/configs/common/models/mask_rcnn_fpn.py new file mode 100644 index 0000000..5e5c501 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/mask_rcnn_fpn.py @@ -0,0 +1,95 @@ +from detectron2.config import LazyCall as L 
+from detectron2.layers import ShapeSpec +from detectron2.modeling.meta_arch import GeneralizedRCNN +from detectron2.modeling.anchor_generator import DefaultAnchorGenerator +from detectron2.modeling.backbone.fpn import LastLevelMaxPool +from detectron2.modeling.backbone import BasicStem, FPN, ResNet +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.matcher import Matcher +from detectron2.modeling.poolers import ROIPooler +from detectron2.modeling.proposal_generator import RPN, StandardRPNHead +from detectron2.modeling.roi_heads import ( + StandardROIHeads, + FastRCNNOutputLayers, + MaskRCNNConvUpsampleHead, + FastRCNNConvFCHead, +) + +from ..data.constants import constants + +model = L(GeneralizedRCNN)( + backbone=L(FPN)( + bottom_up=L(ResNet)( + stem=L(BasicStem)(in_channels=3, out_channels=64, norm="FrozenBN"), + stages=L(ResNet.make_default_stages)( + depth=50, + stride_in_1x1=True, + norm="FrozenBN", + ), + out_features=["res2", "res3", "res4", "res5"], + ), + in_features="${.bottom_up.out_features}", + out_channels=256, + top_block=L(LastLevelMaxPool)(), + ), + proposal_generator=L(RPN)( + in_features=["p2", "p3", "p4", "p5", "p6"], + head=L(StandardRPNHead)(in_channels=256, num_anchors=3), + anchor_generator=L(DefaultAnchorGenerator)( + sizes=[[32], [64], [128], [256], [512]], + aspect_ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64], + offset=0.0, + ), + anchor_matcher=L(Matcher)( + thresholds=[0.3, 0.7], labels=[0, -1, 1], allow_low_quality_matches=True + ), + box2box_transform=L(Box2BoxTransform)(weights=[1.0, 1.0, 1.0, 1.0]), + batch_size_per_image=256, + positive_fraction=0.5, + pre_nms_topk=(2000, 1000), + post_nms_topk=(1000, 1000), + nms_thresh=0.7, + ), + roi_heads=L(StandardROIHeads)( + num_classes=80, + batch_size_per_image=512, + positive_fraction=0.25, + proposal_matcher=L(Matcher)( + thresholds=[0.5], labels=[0, 1], allow_low_quality_matches=False + ), + box_in_features=["p2", "p3", "p4", "p5"], + 
box_pooler=L(ROIPooler)( + output_size=7, + scales=(1.0 / 4, 1.0 / 8, 1.0 / 16, 1.0 / 32), + sampling_ratio=0, + pooler_type="ROIAlignV2", + ), + box_head=L(FastRCNNConvFCHead)( + input_shape=ShapeSpec(channels=256, height=7, width=7), + conv_dims=[], + fc_dims=[1024, 1024], + ), + box_predictor=L(FastRCNNOutputLayers)( + input_shape=ShapeSpec(channels=1024), + test_score_thresh=0.05, + box2box_transform=L(Box2BoxTransform)(weights=(10, 10, 5, 5)), + num_classes="${..num_classes}", + ), + mask_in_features=["p2", "p3", "p4", "p5"], + mask_pooler=L(ROIPooler)( + output_size=14, + scales=(1.0 / 4, 1.0 / 8, 1.0 / 16, 1.0 / 32), + sampling_ratio=0, + pooler_type="ROIAlignV2", + ), + mask_head=L(MaskRCNNConvUpsampleHead)( + input_shape=ShapeSpec(channels=256, width=14, height=14), + num_classes="${..num_classes}", + conv_dims=[256, 256, 256, 256, 256], + ), + ), + pixel_mean=constants.imagenet_bgr256_mean, + pixel_std=constants.imagenet_bgr256_std, + input_format="BGR", +) diff --git a/src/simpleimageclassifier/configs/common/models/mask_rcnn_vitdet.py b/src/simpleimageclassifier/configs/common/models/mask_rcnn_vitdet.py new file mode 100644 index 0000000..d6f5244 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/mask_rcnn_vitdet.py @@ -0,0 +1,59 @@ +from functools import partial +import torch.nn as nn +from detectron2.config import LazyCall as L +from detectron2.modeling import ViT, SimpleFeaturePyramid +from detectron2.modeling.backbone.fpn import LastLevelMaxPool + +from .mask_rcnn_fpn import model +from ..data.constants import constants + +model.pixel_mean = constants.imagenet_rgb256_mean +model.pixel_std = constants.imagenet_rgb256_std +model.input_format = "RGB" + +# Base +embed_dim, depth, num_heads, dp = 768, 12, 12, 0.1 +# Creates Simple Feature Pyramid from ViT backbone +model.backbone = L(SimpleFeaturePyramid)( + net=L(ViT)( # Single-scale ViT backbone + img_size=1024, + patch_size=16, + embed_dim=embed_dim, + depth=depth, + 
num_heads=num_heads, + drop_path_rate=dp, + window_size=14, + mlp_ratio=4, + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + window_block_indexes=[ + # 2, 5, 8 11 for global attention + 0, + 1, + 3, + 4, + 6, + 7, + 9, + 10, + ], + residual_block_indexes=[], + use_rel_pos=True, + out_feature="last_feat", + ), + in_feature="${.net.out_feature}", + out_channels=256, + scale_factors=(4.0, 2.0, 1.0, 0.5), + top_block=L(LastLevelMaxPool)(), + norm="LN", + square_pad=1024, +) + +model.roi_heads.box_head.conv_norm = model.roi_heads.mask_head.conv_norm = "LN" + +# 2conv in RPN: +model.proposal_generator.head.conv_dims = [-1, -1] + +# 4conv1fc box head +model.roi_heads.box_head.conv_dims = [256, 256, 256, 256] +model.roi_heads.box_head.fc_dims = [1024] diff --git a/src/simpleimageclassifier/configs/common/models/panoptic_fpn.py b/src/simpleimageclassifier/configs/common/models/panoptic_fpn.py new file mode 100644 index 0000000..88f55d2 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/panoptic_fpn.py @@ -0,0 +1,20 @@ +from detectron2.config import LazyCall as L +from detectron2.layers import ShapeSpec +from detectron2.modeling import PanopticFPN +from detectron2.modeling.meta_arch.semantic_seg import SemSegFPNHead + +from .mask_rcnn_fpn import model + +model._target_ = PanopticFPN +model.sem_seg_head = L(SemSegFPNHead)( + input_shape={ + f: L(ShapeSpec)(stride=s, channels="${....backbone.out_channels}") + for f, s in zip(["p2", "p3", "p4", "p5"], [4, 8, 16, 32]) + }, + ignore_value=255, + num_classes=54, # COCO stuff + 1 + conv_dims=128, + common_stride=4, + loss_weight=0.5, + norm="GN", +) diff --git a/src/simpleimageclassifier/configs/common/models/retinanet.py b/src/simpleimageclassifier/configs/common/models/retinanet.py new file mode 100644 index 0000000..784e531 --- /dev/null +++ b/src/simpleimageclassifier/configs/common/models/retinanet.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- + +from detectron2.config import LazyCall as L +from 
detectron2.layers import ShapeSpec +from detectron2.modeling.meta_arch import RetinaNet +from detectron2.modeling.anchor_generator import DefaultAnchorGenerator +from detectron2.modeling.backbone.fpn import LastLevelP6P7 +from detectron2.modeling.backbone import BasicStem, FPN, ResNet +from detectron2.modeling.box_regression import Box2BoxTransform +from detectron2.modeling.matcher import Matcher +from detectron2.modeling.meta_arch.retinanet import RetinaNetHead + +from ..data.constants import constants + +model = L(RetinaNet)( + backbone=L(FPN)( + bottom_up=L(ResNet)( + stem=L(BasicStem)(in_channels=3, out_channels=64, norm="FrozenBN"), + stages=L(ResNet.make_default_stages)( + depth=50, + stride_in_1x1=True, + norm="FrozenBN", + ), + out_features=["res3", "res4", "res5"], + ), + in_features=["res3", "res4", "res5"], + out_channels=256, + top_block=L(LastLevelP6P7)(in_channels=2048, out_channels="${..out_channels}"), + ), + head=L(RetinaNetHead)( + # Shape for each input feature map + input_shape=[ShapeSpec(channels=256)] * 5, + num_classes="${..num_classes}", + conv_dims=[256, 256, 256, 256], + prior_prob=0.01, + num_anchors=9, + ), + anchor_generator=L(DefaultAnchorGenerator)( + sizes=[[x, x * 2 ** (1.0 / 3), x * 2 ** (2.0 / 3)] for x in [32, 64, 128, 256, 512]], + aspect_ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128], + offset=0.0, + ), + box2box_transform=L(Box2BoxTransform)(weights=[1.0, 1.0, 1.0, 1.0]), + anchor_matcher=L(Matcher)( + thresholds=[0.4, 0.5], labels=[0, -1, 1], allow_low_quality_matches=True + ), + num_classes=80, + head_in_features=["p3", "p4", "p5", "p6", "p7"], + focal_loss_alpha=0.25, + focal_loss_gamma=2.0, + pixel_mean=constants.imagenet_bgr256_mean, + pixel_std=constants.imagenet_bgr256_std, + input_format="BGR", +) diff --git a/src/simpleimageclassifier/configs/common/optim.py b/src/simpleimageclassifier/configs/common/optim.py new file mode 100644 index 0000000..6cf43e8 --- /dev/null +++ 
b/src/simpleimageclassifier/configs/common/optim.py @@ -0,0 +1,28 @@ +import torch + +from detectron2.config import LazyCall as L +from detectron2.solver.build import get_default_optimizer_params + +SGD = L(torch.optim.SGD)( + params=L(get_default_optimizer_params)( + # params.model is meant to be set to the model object, before instantiating + # the optimizer. + weight_decay_norm=0.0 + ), + lr=0.02, + momentum=0.9, + weight_decay=1e-4, +) + + +AdamW = L(torch.optim.AdamW)( + params=L(get_default_optimizer_params)( + # params.model is meant to be set to the model object, before instantiating + # the optimizer. + base_lr="${..lr}", + weight_decay_norm=0.0, + ), + lr=1e-4, + betas=(0.9, 0.999), + weight_decay=0.1, +) diff --git a/src/simpleimageclassifier/configs/common/train.py b/src/simpleimageclassifier/configs/common/train.py new file mode 100644 index 0000000..b6ed02b --- /dev/null +++ b/src/simpleimageclassifier/configs/common/train.py @@ -0,0 +1,18 @@ +# Common training-related configs that are designed for "tools/lazyconfig_train_net.py" +# You can use your own instead, together with your own train_net.py +train = dict( + output_dir="./output", + init_checkpoint="", + max_iter=90000, + amp=dict(enabled=False), # options for Automatic Mixed Precision + ddp=dict( # options for DistributedDataParallel + broadcast_buffers=False, + find_unused_parameters=False, + fp16_compression=False, + ), + checkpointer=dict(period=5000, max_to_keep=100), # options for PeriodicCheckpointer + eval_period=5000, + log_period=20, + device="cuda" + # ... 
+) diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ.py new file mode 100644 index 0000000..3740e9b --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ.py @@ -0,0 +1,9 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +model.backbone.bottom_up.stages.depth = 101 diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ.py new file mode 100644 index 0000000..18e5f07 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_R_101_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 2 # 100ep -> 200ep + +lr_multiplier.scheduler.milestones = [ + milestone * 2 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ.py new file mode 100644 index 0000000..63c54ee --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_R_101_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 4 # 100ep -> 400ep + +lr_multiplier.scheduler.milestones = [ + milestone * 4 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py new file 
mode 100644 index 0000000..df7a2ae --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py @@ -0,0 +1,72 @@ +import detectron2.data.transforms as T +from detectron2.config.lazy import LazyCall as L +from detectron2.layers.batch_norm import NaiveSyncBatchNorm +from detectron2.solver import WarmupParamScheduler +from fvcore.common.param_scheduler import MultiStepParamScheduler + +from ..common.data.coco import dataloader +from ..common.models.mask_rcnn_fpn import model +from ..common.optim import SGD as optimizer +from ..common.train import train + +# train from scratch +train.init_checkpoint = "" +train.amp.enabled = True +train.ddp.fp16_compression = True +model.backbone.bottom_up.freeze_at = 0 + +# SyncBN +# fmt: off +model.backbone.bottom_up.stem.norm = \ + model.backbone.bottom_up.stages.norm = \ + model.backbone.norm = "SyncBN" + +# Using NaiveSyncBatchNorm becase heads may have empty input. That is not supported by +# torch.nn.SyncBatchNorm. We can remove this after +# https://github.com/pytorch/pytorch/issues/36530 is fixed. 
+model.roi_heads.box_head.conv_norm = \ + model.roi_heads.mask_head.conv_norm = lambda c: NaiveSyncBatchNorm(c, + stats_mode="N") +# fmt: on + +# 2conv in RPN: +# https://github.com/tensorflow/tpu/blob/b24729de804fdb751b06467d3dce0637fa652060/models/official/detection/modeling/architecture/heads.py#L95-L97 # noqa: E501, B950 +model.proposal_generator.head.conv_dims = [-1, -1] + +# 4conv1fc box head +model.roi_heads.box_head.conv_dims = [256, 256, 256, 256] +model.roi_heads.box_head.fc_dims = [1024] + +# resize_and_crop_image in: +# https://github.com/tensorflow/tpu/blob/b24729de804fdb751b06467d3dce0637fa652060/models/official/detection/utils/input_utils.py#L127 # noqa: E501, B950 +image_size = 1024 +dataloader.train.mapper.augmentations = [ + L(T.ResizeScale)( + min_scale=0.1, max_scale=2.0, target_height=image_size, target_width=image_size + ), + L(T.FixedSizeCrop)(crop_size=(image_size, image_size)), + L(T.RandomFlip)(horizontal=True), +] + +# recompute boxes due to cropping +dataloader.train.mapper.recompute_boxes = True + +# larger batch-size. +dataloader.train.total_batch_size = 64 + +# Equivalent to 100 epochs. 
+# 100 ep = 184375 iters * 64 images/iter / 118000 images/ep +train.max_iter = 184375 + +lr_multiplier = L(WarmupParamScheduler)( + scheduler=L(MultiStepParamScheduler)( + values=[1.0, 0.1, 0.01], + milestones=[163889, 177546], + num_updates=train.max_iter, + ), + warmup_length=500 / train.max_iter, + warmup_factor=0.067, +) + +optimizer.lr = 0.1 +optimizer.weight_decay = 4e-5 diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ.py new file mode 100644 index 0000000..2a7c376 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 2 # 100ep -> 200ep + +lr_multiplier.scheduler.milestones = [ + milestone * 2 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ.py new file mode 100644 index 0000000..97586b8 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 4 # 100ep -> 400ep + +lr_multiplier.scheduler.milestones = [ + milestone * 4 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_50ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_50ep_LSJ.py new file mode 100644 index 0000000..2ca1ede --- /dev/null +++ 
b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_R_50_FPN_50ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter //= 2 # 100ep -> 50ep + +lr_multiplier.scheduler.milestones = [ + milestone // 2 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py new file mode 100644 index 0000000..ef0b6d1 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ.py @@ -0,0 +1,29 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) +from detectron2.config import LazyCall as L +from detectron2.modeling.backbone import RegNet +from detectron2.modeling.backbone.regnet import SimpleStem, ResBottleneckBlock + +# Config source: +# https://github.com/facebookresearch/detectron2/blob/main/configs/COCO-InstanceSegmentation/mask_rcnn_regnetx_4gf_dds_fpn_1x.py # noqa +model.backbone.bottom_up = L(RegNet)( + stem_class=SimpleStem, + stem_width=32, + block_class=ResBottleneckBlock, + depth=23, + w_a=38.65, + w_0=96, + w_m=2.43, + group_width=40, + norm="SyncBN", + out_features=["s1", "s2", "s3", "s4"], +) +model.pixel_std = [57.375, 57.120, 58.395] + +# RegNets benefit from enabling cudnn benchmark mode +train.cudnn_benchmark = True diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py new file mode 100644 index 0000000..731320e --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ.py @@ -0,0 +1,14 @@ +from 
.mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 2 # 100ep -> 200ep + +lr_multiplier.scheduler.milestones = [ + milestone * 2 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py new file mode 100644 index 0000000..8f369a2 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 4 # 100ep -> 400ep + +lr_multiplier.scheduler.milestones = [ + milestone * 4 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py new file mode 100644 index 0000000..ba2c327 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ.py @@ -0,0 +1,30 @@ +from .mask_rcnn_R_50_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) +from detectron2.config import LazyCall as L +from detectron2.modeling.backbone import RegNet +from detectron2.modeling.backbone.regnet import SimpleStem, ResBottleneckBlock + +# Config source: +# https://github.com/facebookresearch/detectron2/blob/main/configs/COCO-InstanceSegmentation/mask_rcnn_regnety_4gf_dds_fpn_1x.py # noqa +model.backbone.bottom_up = L(RegNet)( + stem_class=SimpleStem, + stem_width=32, + block_class=ResBottleneckBlock, + depth=22, + w_a=31.41, + w_0=96, + w_m=2.24, + 
group_width=64, + se_ratio=0.25, + norm="SyncBN", + out_features=["s1", "s2", "s3", "s4"], +) +model.pixel_std = [57.375, 57.120, 58.395] + +# RegNets benefit from enabling cudnn benchmark mode +train.cudnn_benchmark = True diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py new file mode 100644 index 0000000..b867cc8 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 2 # 100ep -> 200ep + +lr_multiplier.scheduler.milestones = [ + milestone * 2 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py new file mode 100644 index 0000000..7b86ea8 --- /dev/null +++ b/src/simpleimageclassifier/configs/new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ.py @@ -0,0 +1,14 @@ +from .mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ import ( + dataloader, + lr_multiplier, + model, + optimizer, + train, +) + +train.max_iter *= 4 # 100ep -> 400ep + +lr_multiplier.scheduler.milestones = [ + milestone * 4 for milestone in lr_multiplier.scheduler.milestones +] +lr_multiplier.scheduler.num_updates = train.max_iter diff --git a/src/simpleimageclassifier/configs/quick_schedules/README.md b/src/simpleimageclassifier/configs/quick_schedules/README.md new file mode 100644 index 0000000..4e6c82e --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/README.md @@ -0,0 +1,8 @@ +These are quick configs for performance or accuracy regression tracking purposes. 
+ +* `*instance_test.yaml`: can train on 2 GPUs. They are used to test whether the training can + successfully finish. They are not expected to produce reasonable training results. +* `*inference_acc_test.yaml`: They should be run using `--eval-only`. They run inference using pre-trained models and verify + the results are as expected. +* `*training_acc_test.yaml`: They should be trained on 8 GPUs. They finish in about an hour and verify the training accuracy + is within the normal range. diff --git a/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..fc5a411 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://Misc/cascade_mask_rcnn_R_50_FPN_3x/144998488/model_final_480dd8.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 50.18, 0.02], ["segm", "AP", 43.87, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..e41a0fe --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/cascade_mask_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,11 @@ +_BASE_: "../Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml new file 
mode 100644 index 0000000..a2f37e5 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Detection/fast_rcnn_R_50_FPN_1x/137635226/model_final_e5f7ce.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 45.70, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..52fc0ec --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,15 @@ +_BASE_: "../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + PROPOSAL_FILES_TRAIN: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) + TEST: ("coco_2017_val_100",) + PROPOSAL_FILES_TEST: ("detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl", ) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..14cf2aa --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x/137849621/model_final_a6e10b.pkl" +DATASETS: + TEST: ("keypoints_coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 52.47, 0.02], 
["keypoints", "AP", 67.36, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..3dd209f --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,16 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + ROI_HEADS: + NUM_CLASSES: 1 +DATASETS: + TRAIN: ("keypoints_coco_2017_val_100",) + TEST: ("keypoints_coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml new file mode 100644 index 0000000..4b92392 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml @@ -0,0 +1,30 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS: False + LOSS_WEIGHT: 4.0 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 # Keypoint AP degrades when using plain L1 loss + RPN: + SMOOTH_L1_BETA: 0.2 # Keypoint AP degrades when using plain L1 loss +DATASETS: + TRAIN: ("keypoints_coco_2017_val",) + TEST: ("keypoints_coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +SOLVER: + WARMUP_FACTOR: 0.33333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 55.35, 1.0], ["keypoints", "AP", 76.91, 1.0]] 
diff --git a/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000..9bd9628 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,28 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + KEYPOINT_ON: True + RESNETS: + DEPTH: 50 + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + NUM_CLASSES: 1 + ROI_KEYPOINT_HEAD: + POOLER_RESOLUTION: 14 + POOLER_SAMPLING_RATIO: 2 + ROI_BOX_HEAD: + SMOOTH_L1_BETA: 1.0 # Keypoint AP degrades when using plain L1 loss + RPN: + SMOOTH_L1_BETA: 0.2 # Keypoint AP degrades when using plain L1 loss +DATASETS: + TRAIN: ("keypoints_coco_2017_val",) + TEST: ("keypoints_coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) +SOLVER: + WARMUP_FACTOR: 0.33333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 53.5, 1.0], ["keypoints", "AP", 72.4, 1.0]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml new file mode 100644 index 0000000..ab6e698 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_GCV_instant_test.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.001 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 + CLIP_GRADIENTS: + ENABLED: True + CLIP_TYPE: "value" + CLIP_VALUE: 1.0 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml 
b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml new file mode 100644 index 0000000..b2d5b7f --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x/137849525/model_final_4ce675.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.37, 0.02], ["segm", "AP", 40.99, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml new file mode 100644 index 0000000..6c4f121 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml @@ -0,0 +1,14 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.001 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml new file mode 100644 index 0000000..f68dd8f --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml @@ -0,0 +1,22 @@ +_BASE_: "../Base-RCNN-C4.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val",) + TEST: ("coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (600,) + MAX_SIZE_TRAIN: 1000 + MIN_SIZE_TEST: 800 + MAX_SIZE_TEST: 1000 +SOLVER: + IMS_PER_BATCH: 8 # base uses 16 + WARMUP_FACTOR: 0.33333 + WARMUP_ITERS: 100 
+ STEPS: (11000, 11600) + MAX_ITER: 12000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 41.88, 0.7], ["segm", "AP", 33.79, 0.5]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml new file mode 100644 index 0000000..e3ce6cf --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x/137849551/model_final_84107b.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.44, 0.02], ["segm", "AP", 42.94, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..e5454bf --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,10 @@ +_BASE_: "../COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 47.34, 0.02], ["segm", "AP", 42.67, 0.02], ["bbox_TTA", "AP", 49.11, 0.02], ["segm_TTA", "AP", 45.04, 0.02]] + AUG: + ENABLED: True + MIN_SIZES: (700, 800) # to save some time diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..6dbfcde --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml @@ -0,0 +1,14 @@ +_BASE_: 
"../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_pred_boxes_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_pred_boxes_training_acc_test.yaml new file mode 100644 index 0000000..52f7876 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_pred_boxes_training_acc_test.yaml @@ -0,0 +1,6 @@ +_BASE_: "./mask_rcnn_R_50_FPN_training_acc_test.yaml" +MODEL: + ROI_BOX_HEAD: + TRAIN_ON_PRED_BOXES: True +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 42.6, 1.0], ["segm", "AP", 35.8, 0.8]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml new file mode 100644 index 0000000..aadae4c --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml @@ -0,0 +1,21 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + ROI_HEADS: + BATCH_SIZE_PER_IMAGE: 256 + MASK_ON: True +DATASETS: + TRAIN: ("coco_2017_val",) + TEST: ("coco_2017_val",) +INPUT: + MIN_SIZE_TRAIN: (600,) + MAX_SIZE_TRAIN: 1000 + MIN_SIZE_TEST: 800 + MAX_SIZE_TEST: 1000 +SOLVER: + WARMUP_FACTOR: 0.3333333 + WARMUP_ITERS: 100 + STEPS: (5500, 5800) + MAX_ITER: 6000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 42.5, 1.0], ["segm", "AP", 35.8, 0.8]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml new file mode 100644 index 0000000..70874e3 --- 
/dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-PanopticSegmentation/panoptic_fpn_R_50_3x/139514569/model_final_c10459.pkl" +DATASETS: + TEST: ("coco_2017_val_100_panoptic_separated",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 46.47, 0.02], ["segm", "AP", 43.39, 0.02], ["sem_seg", "mIoU", 42.55, 0.02], ["panoptic_seg", "PQ", 38.99, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml new file mode 100644 index 0000000..7cdee7b --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml @@ -0,0 +1,19 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_val_100_panoptic_separated",) + TEST: ("coco_2017_val_100_panoptic_separated",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 1 diff --git a/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml new file mode 100644 index 0000000..f3bbf30 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml @@ -0,0 +1,20 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "PanopticFPN" + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + MASK_ON: True + RESNETS: + DEPTH: 50 + SEM_SEG_HEAD: + LOSS_WEIGHT: 0.5 +DATASETS: + TRAIN: ("coco_2017_val_panoptic_separated",) + TEST: ("coco_2017_val_panoptic_separated",) +SOLVER: + 
BASE_LR: 0.01 + WARMUP_FACTOR: 0.001 + WARMUP_ITERS: 500 + STEPS: (5500,) + MAX_ITER: 7000 +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 46.70, 1.1], ["segm", "AP", 39.0, 0.7], ["sem_seg", "mIoU", 64.73, 1.3], ["panoptic_seg", "PQ", 48.13, 0.8]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..cb666c1 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/retinanet_R_50_FPN_3x.yaml" +MODEL: + WEIGHTS: "detectron2://COCO-Detection/retinanet_R_50_FPN_3x/190397829/model_final_5bd44e.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["bbox", "AP", 44.45, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..8d95c1f --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml @@ -0,0 +1,13 @@ +_BASE_: "../COCO-Detection/retinanet_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + BASE_LR: 0.005 + STEPS: (30,) + MAX_ITER: 40 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..c7c3f90 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,7 @@ +_BASE_: "../COCO-Detection/rpn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: 
"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/model_final_02ce48.pkl" +DATASETS: + TEST: ("coco_2017_val_100",) +TEST: + EXPECTED_RESULTS: [["box_proposals", "AR@1000", 58.16, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..402d432 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml @@ -0,0 +1,13 @@ +_BASE_: "../COCO-Detection/rpn_R_50_FPN_1x.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" +DATASETS: + TRAIN: ("coco_2017_val_100",) + TEST: ("coco_2017_val_100",) +SOLVER: + STEPS: (30,) + MAX_ITER: 40 + BASE_LR: 0.005 + IMS_PER_BATCH: 4 +DATALOADER: + NUM_WORKERS: 2 diff --git a/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml new file mode 100644 index 0000000..bca7498 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml @@ -0,0 +1,10 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: "SemanticSegmentor" + WEIGHTS: "detectron2://semantic_R_50_FPN_1x/111802073/model_final_c18079783c55a94968edc28b7101c5f0.pkl" + RESNETS: + DEPTH: 50 +DATASETS: + TEST: ("coco_2017_val_100_panoptic_stuffonly",) +TEST: + EXPECTED_RESULTS: [["sem_seg", "mIoU", 39.53, 0.02], ["sem_seg", "mACC", 51.50, 0.02]] diff --git a/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml b/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml new file mode 100644 index 0000000..14ab606 --- /dev/null +++ b/src/simpleimageclassifier/configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml @@ -0,0 +1,18 @@ +_BASE_: "../Base-RCNN-FPN.yaml" +MODEL: + META_ARCHITECTURE: 
# --- file: src/simpleimageclassifier/predictor.py ---
import atexit
import bisect
import multiprocessing as mp
from collections import deque
import cv2
import torch

from detectron2.data import MetadataCatalog
from detectron2.engine.defaults import DefaultPredictor
from detectron2.utils.video_visualizer import VideoVisualizer
from detectron2.utils.visualizer import ColorMode, Visualizer


class VisualizationDemo(object):
    """Run a detectron2 model on images or video frames and render the predictions."""

    def __init__(self, cfg, instance_mode=ColorMode.IMAGE, parallel=False):
        """
        Args:
            cfg (CfgNode):
            instance_mode (ColorMode):
            parallel (bool): whether to run the model in different processes from visualization.
                Useful since the visualization logic can be slow.
        """
        # Metadata of the first TEST dataset supplies class names/colors for the
        # visualizer; "__unused" is a placeholder key when no test set is configured.
        self.metadata = MetadataCatalog.get(
            cfg.DATASETS.TEST[0] if len(cfg.DATASETS.TEST) else "__unused"
        )
        self.cpu_device = torch.device("cpu")
        self.instance_mode = instance_mode

        self.parallel = parallel
        if parallel:
            # One worker process per visible GPU (AsyncPredictor falls back to a
            # single CPU worker when no GPU is available).
            num_gpu = torch.cuda.device_count()
            self.predictor = AsyncPredictor(cfg, num_gpus=num_gpu)
        else:
            self.predictor = DefaultPredictor(cfg)

    def run_on_image(self, image):
        """
        Args:
            image (np.ndarray): an image of shape (H, W, C) (in BGR order).
                This is the format used by OpenCV.

        Returns:
            predictions (dict): the output of the model.
            vis_output (VisImage): the visualized image output.
        """
        vis_output = None
        predictions = self.predictor(image)
        # Convert image from OpenCV BGR format to Matplotlib RGB format.
        image = image[:, :, ::-1]
        visualizer = Visualizer(image, self.metadata, instance_mode=self.instance_mode)
        # Panoptic output takes precedence; otherwise draw semantic and/or
        # instance predictions (instances drawn last win the returned vis_output).
        if "panoptic_seg" in predictions:
            panoptic_seg, segments_info = predictions["panoptic_seg"]
            vis_output = visualizer.draw_panoptic_seg_predictions(
                panoptic_seg.to(self.cpu_device), segments_info
            )
        else:
            if "sem_seg" in predictions:
                vis_output = visualizer.draw_sem_seg(
                    predictions["sem_seg"].argmax(dim=0).to(self.cpu_device)
                )
            if "instances" in predictions:
                instances = predictions["instances"].to(self.cpu_device)
                vis_output = visualizer.draw_instance_predictions(predictions=instances)

        return predictions, vis_output

    def _frame_from_video(self, video):
        # Yield BGR frames until the capture closes or a read fails.
        while video.isOpened():
            success, frame = video.read()
            if success:
                yield frame
            else:
                break

    def run_on_video(self, video):
        """
        Visualizes predictions on frames of the input video.

        Args:
            video (cv2.VideoCapture): a :class:`VideoCapture` object, whose source can be
                either a webcam or a video file.

        Yields:
            ndarray: BGR visualizations of each video frame.
        """
        video_visualizer = VideoVisualizer(self.metadata, self.instance_mode)

        def process_predictions(frame, predictions):
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            # NOTE(review): if none of the expected keys is present in
            # `predictions`, `vis_frame` is unbound and the cvtColor below
            # raises NameError — presumably every supported model emits one
            # of these keys; confirm against the configured META_ARCHITECTURE.
            if "panoptic_seg" in predictions:
                panoptic_seg, segments_info = predictions["panoptic_seg"]
                vis_frame = video_visualizer.draw_panoptic_seg_predictions(
                    frame, panoptic_seg.to(self.cpu_device), segments_info
                )
            elif "instances" in predictions:
                predictions = predictions["instances"].to(self.cpu_device)
                vis_frame = video_visualizer.draw_instance_predictions(frame, predictions)
            elif "sem_seg" in predictions:
                vis_frame = video_visualizer.draw_sem_seg(
                    frame, predictions["sem_seg"].argmax(dim=0).to(self.cpu_device)
                )

            # Converts Matplotlib RGB format to OpenCV BGR format
            vis_frame = cv2.cvtColor(vis_frame.get_image(), cv2.COLOR_RGB2BGR)
            return vis_frame

        frame_gen = self._frame_from_video(video)
        if self.parallel:
            buffer_size = self.predictor.default_buffer_size

            frame_data = deque()

            # Keep up to `buffer_size` frames in flight: enqueue frames for the
            # workers and only start consuming results once the pipeline is full.
            for cnt, frame in enumerate(frame_gen):
                frame_data.append(frame)
                self.predictor.put(frame)

                if cnt >= buffer_size:
                    frame = frame_data.popleft()
                    predictions = self.predictor.get()
                    yield process_predictions(frame, predictions)

            # Drain the remaining in-flight frames after input is exhausted.
            while len(frame_data):
                frame = frame_data.popleft()
                predictions = self.predictor.get()
                yield process_predictions(frame, predictions)
        else:
            for frame in frame_gen:
                yield process_predictions(frame, self.predictor(frame))


class AsyncPredictor:
    """
    A predictor that runs the model asynchronously, possibly on >1 GPUs.
    Because rendering the visualization takes considerably amount of time,
    this helps improve throughput a little bit when rendering videos.
    """

    class _StopToken:
        # Sentinel placed on the task queue to tell a worker to exit.
        pass

    class _PredictWorker(mp.Process):
        def __init__(self, cfg, task_queue, result_queue):
            self.cfg = cfg
            self.task_queue = task_queue
            self.result_queue = result_queue
            super().__init__()

        def run(self):
            # Each worker builds its own predictor (cfg already pinned to a device).
            predictor = DefaultPredictor(self.cfg)

            while True:
                task = self.task_queue.get()
                if isinstance(task, AsyncPredictor._StopToken):
                    break
                idx, data = task
                result = predictor(data)
                # Results carry their submission index so the consumer can
                # restore submission order.
                self.result_queue.put((idx, result))

    def __init__(self, cfg, num_gpus: int = 1):
        """
        Args:
            cfg (CfgNode):
            num_gpus (int): if 0, will run on CPU
        """
        num_workers = max(num_gpus, 1)
        # Bounded queues (3 tasks per worker) apply back-pressure on `put`.
        self.task_queue = mp.Queue(maxsize=num_workers * 3)
        self.result_queue = mp.Queue(maxsize=num_workers * 3)
        self.procs = []
        for gpuid in range(max(num_gpus, 1)):
            # Clone per worker so each process gets its own frozen cfg with its device.
            cfg = cfg.clone()
            cfg.defrost()
            cfg.MODEL.DEVICE = "cuda:{}".format(gpuid) if num_gpus > 0 else "cpu"
            self.procs.append(
                AsyncPredictor._PredictWorker(cfg, self.task_queue, self.result_queue)
            )

        self.put_idx = 0
        self.get_idx = 0
        # Buffer of results that arrived ahead of their turn, kept sorted by index.
        self.result_rank = []
        self.result_data = []

        for p in self.procs:
            p.start()
        atexit.register(self.shutdown)

    def put(self, image):
        # Tag each submission with a monotonically increasing index.
        self.put_idx += 1
        self.task_queue.put((self.put_idx, image))

    def get(self):
        self.get_idx += 1  # the index needed for this request
        # Fast path: the next-in-order result is already buffered.
        if len(self.result_rank) and self.result_rank[0] == self.get_idx:
            res = self.result_data[0]
            del self.result_data[0], self.result_rank[0]
            return res

        while True:
            # make sure the results are returned in the correct order
            idx, res = self.result_queue.get()
            if idx == self.get_idx:
                return res
            # Out-of-order result: stash it in sorted position for a later get().
            insert = bisect.bisect(self.result_rank, idx)
            self.result_rank.insert(insert, idx)
            self.result_data.insert(insert, res)

    def __len__(self):
        # Number of submitted frames whose results have not been consumed yet.
        return self.put_idx - self.get_idx

    def __call__(self, image):
        # Synchronous convenience wrapper: submit one image and wait for its result.
        self.put(image)
        return self.get()

    def shutdown(self):
        # One stop token per worker so every process sees one and exits.
        for _ in self.procs:
            self.task_queue.put(AsyncPredictor._StopToken())

    @property
    def default_buffer_size(self):
        # Heuristic pipeline depth used by VisualizationDemo.run_on_video.
        return len(self.procs) * 5


# --- file: src/simpleimageclassifier/simpleimageclassifier.py ---
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @author: Giancarlo Panichi
#
# Created on 2022/07/20
#
import argparse
import glob
import multiprocessing as mp
import numpy as np
import os
import tempfile
import time
import warnings
import cv2
import tqdm

from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger

from .predictor import VisualizationDemo

# constants
WINDOW_NAME = "COCO detections"


def setup_cfg(args):
    """Build a frozen detectron2 config from --config-file, --opts and the
    --confidence-threshold command-line arguments."""
    # load config from file and command-line arguments
    cfg = get_cfg()
    # To use demo for Panoptic-DeepLab, please uncomment the following two lines.
    # from detectron2.projects.panoptic_deeplab import add_panoptic_deeplab_config  # noqa
    # add_panoptic_deeplab_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # Set score_threshold for builtin models
    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold
    cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold
    cfg.freeze()
    return cfg
+ # from detectron2.projects.panoptic_deeplab import add_panoptic_deeplab_config # noqa + # add_panoptic_deeplab_config(cfg) + cfg.merge_from_file(args.config_file) + cfg.merge_from_list(args.opts) + # Set score_threshold for builtin models + cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold + cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold + cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold + cfg.freeze() + return cfg + + +def get_parser(): + parser = argparse.ArgumentParser(description="Detectron2 demo for builtin configs") + parser.add_argument( + "--config-file", + default="configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml", + metavar="FILE", + help="path to config file", + ) + parser.add_argument("--webcam", action="store_true", help="Take inputs from webcam.") + parser.add_argument("--video-input", help="Path to video file.") + parser.add_argument( + "--input", + nargs="+", + help="A list of space separated input images; " + "or a single glob pattern such as 'directory/*.jpg'", + ) + parser.add_argument( + "--output", + help="A file or directory to save output visualizations. 
" + "If not given, will show output in an OpenCV window.", + ) + + parser.add_argument( + "--confidence-threshold", + type=float, + default=0.5, + help="Minimum score for instance predictions to be shown", + ) + parser.add_argument( + "--opts", + help="Modify config options using the command-line 'KEY VALUE' pairs", + default=[], + nargs=argparse.REMAINDER, + ) + return parser + + +def test_opencv_video_format(codec, file_ext): + with tempfile.TemporaryDirectory(prefix="video_format_test") as dir: + filename = os.path.join(dir, "test_file" + file_ext) + writer = cv2.VideoWriter( + filename=filename, + fourcc=cv2.VideoWriter_fourcc(*codec), + fps=float(30), + frameSize=(10, 10), + isColor=True, + ) + [writer.write(np.zeros((10, 10, 3), np.uint8)) for _ in range(30)] + writer.release() + if os.path.isfile(filename): + return True + return False + +def simpleimageclassifier(): + mp.set_start_method("spawn", force=True) + args = get_parser().parse_args() + setup_logger(name="fvcore") + logger = setup_logger() + logger.info("Arguments: " + str(args)) + + cfg = setup_cfg(args) + + demo = VisualizationDemo(cfg) + + if args.input: + if len(args.input) == 1: + args.input = glob.glob(os.path.expanduser(args.input[0])) + assert args.input, "The input path(s) was not found" + for path in tqdm.tqdm(args.input, disable=not args.output): + # use PIL, to be consistent with evaluation + img = read_image(path, format="BGR") + start_time = time.time() + predictions, visualized_output = demo.run_on_image(img) + logger.info( + "{}: {} in {:.2f}s".format( + path, + "detected {} instances".format(len(predictions["instances"])) + if "instances" in predictions + else "finished", + time.time() - start_time, + ) + ) + + if args.output: + if os.path.isdir(args.output): + assert os.path.isdir(args.output), args.output + out_filename = os.path.join(args.output, os.path.basename(path)) + else: + assert len(args.input) == 1, "Please specify a directory with args.output" + out_filename = 
args.output + visualized_output.save(out_filename) + else: + cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL) + cv2.imshow(WINDOW_NAME, visualized_output.get_image()[:,:,::-1]) + if cv2.waitKey(0) == 27: + break # esc to quit + elif args.webcam: + assert args.input is None, "Cannot have both --input and --webcam!" + assert args.output is None, "output not yet supported with --webcam!" + cam = cv2.VideoCapture(0) + for vis in tqdm.tqdm(demo.run_on_video(cam)): + cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL) + cv2.imshow(WINDOW_NAME, vis) + if cv2.waitKey(1) == 27: + break # esc to quit + cam.release() + cv2.destroyAllWindows() + elif args.video_input: + video = cv2.VideoCapture(args.video_input) + width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH)) + height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)) + frames_per_second = video.get(cv2.CAP_PROP_FPS) + num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT)) + basename = os.path.basename(args.video_input) + codec, file_ext = ( + ("x264", ".mkv") if test_opencv_video_format("x264", ".mkv") else ("mp4v", ".mp4") + ) + if codec == ".mp4v": + warnings.warn("x264 codec not available, switching to mp4v") + if args.output: + if os.path.isdir(args.output): + output_fname = os.path.join(args.output, basename) + output_fname = os.path.splitext(output_fname)[0] + file_ext + else: + output_fname = args.output + assert not os.path.isfile(output_fname), output_fname + output_file = cv2.VideoWriter( + filename=output_fname, + # some installation of opencv may not support x264 (due to its license), + # you can try other format (e.g. 
MPEG) + fourcc=cv2.VideoWriter_fourcc(*codec), + fps=float(frames_per_second), + frameSize=(width, height), + isColor=True, + ) + assert os.path.isfile(args.video_input) + for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames): + if args.output: + output_file.write(vis_frame) + else: + cv2.namedWindow(basename, cv2.WINDOW_NORMAL) + cv2.imshow(basename, vis_frame) + if cv2.waitKey(1) == 27: + break # esc to quit + video.release() + if args.output: + output_file.release() + else: + cv2.destroyAllWindows() + +