From ade21f142e527c50674192d1e801a7c1503a82f5 Mon Sep 17 00:00:00 2001 From: catree Date: Mon, 28 May 2018 01:33:56 +0200 Subject: [PATCH 01/40] Add Java and Python code for the following features2d tutorials: Harris corner detector, Shi-Tomasi corner detector, Creating your own corner detector, Detecting corners location in subpixels, Feature Detection, Feature Description, Feature Matching with FLANN, Features2D + Homography to find a known object. Use Lowe's ratio test to filter the matches. --- doc/opencv.bib | 34 ++++ .../feature_description.markdown | 81 ++------ .../feature_detection.markdown | 75 ++----- .../feature_flann_matcher.markdown | 155 +++++--------- .../Feature_FlannMatcher_Lowe_ratio_test.png | Bin 0 -> 54032 bytes ...Feature_FlannMatcher_Result_ratio_test.jpg | Bin 0 -> 80889 bytes .../feature_homography.markdown | 125 ++---------- .../images/Feature_Homography_Result.jpg | Bin 91652 -> 90026 bytes .../table_of_content_features2d.markdown | 28 ++- .../corner_subpixeles.markdown | 32 --- .../corner_subpixels.markdown | 46 +++++ .../Corner_Subpixels_Original_Image.jpg} | Bin .../images/Corner_Subpixels_Result.jpg} | Bin .../generic_corner_detector.markdown | 24 ++- .../good_features_to_track.markdown | 18 +- .../images/Feature_Detection_Result_a.jpg | Bin 35105 -> 0 bytes .../images/Feature_Detection_Result_b.jpg | Bin 67289 -> 0 bytes .../good_features_to_track_Shi_Tomasi.jpg | Bin 0 -> 27481 bytes .../harris_detector/harris_detector.markdown | 14 ++ .../TrackingMotion/cornerDetector_Demo.cpp | 158 ++++++++------- .../TrackingMotion/cornerHarris_Demo.cpp | 84 ++++---- .../TrackingMotion/cornerSubPix_Demo.cpp | 134 ++++++------ .../goodFeaturesToTrack_Demo.cpp | 108 +++++----- .../SURF_matching_Demo.cpp | 60 ++++++ .../feature_detection/SURF_detection_Demo.cpp | 46 +++++ .../SURF_FLANN_matching_Demo.cpp | 72 +++++++ .../SURF_FLANN_matching_homography_Demo.cpp | 107 ++++++++++ .../corner_subpixels/CornerSubPixDemo.java | 158 +++++++++++++++ 
.../CornerDetectorDemo.java | 190 ++++++++++++++++++ .../GoodFeaturesToTrackDemo.java | 134 ++++++++++++ .../harris_detector/CornerHarrisDemo.java | 142 +++++++++++++ .../feature_description/SURFMatchingDemo.java | 56 ++++++ .../feature_detection/SURFDetectionDemo.java | 44 ++++ .../SURFFLANNMatchingDemo.java | 78 +++++++ .../SURFFLANNMatchingHomographyDemo.java | 130 ++++++++++++ .../corner_subpixels/cornerSubPix_Demo.py | 70 +++++++ .../cornerDetector_Demo.py | 80 ++++++++ .../goodFeaturesToTrack_Demo.py | 58 ++++++ .../harris_detector/cornerHarris_Demo.py | 55 +++++ .../feature_description/SURF_matching_Demo.py | 35 ++++ .../feature_detection/SURF_detection_Demo.py | 27 +++ .../SURF_FLANN_matching_Demo.py | 43 ++++ .../SURF_FLANN_matching_homography_Demo.py | 78 +++++++ 43 files changed, 2168 insertions(+), 611 deletions(-) create mode 100644 doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Lowe_ratio_test.png create mode 100644 doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Result_ratio_test.jpg delete mode 100644 doc/tutorials/features2d/trackingmotion/corner_subpixeles/corner_subpixeles.markdown create mode 100644 doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown rename doc/tutorials/features2d/trackingmotion/{corner_subpixeles/images/Corner_Subpixeles_Original_Image.jpg => corner_subpixels/images/Corner_Subpixels_Original_Image.jpg} (100%) rename doc/tutorials/features2d/trackingmotion/{corner_subpixeles/images/Corner_Subpixeles_Result.jpg => corner_subpixels/images/Corner_Subpixels_Result.jpg} (100%) delete mode 100644 doc/tutorials/features2d/trackingmotion/good_features_to_track/images/Feature_Detection_Result_a.jpg delete mode 100644 doc/tutorials/features2d/trackingmotion/good_features_to_track/images/Feature_Detection_Result_b.jpg create mode 100644 doc/tutorials/features2d/trackingmotion/good_features_to_track/images/good_features_to_track_Shi_Tomasi.jpg create mode 
100755 samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp create mode 100755 samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp create mode 100755 samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp create mode 100755 samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp create mode 100644 samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java create mode 100644 samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java create mode 100644 samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java create mode 100644 samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java create mode 100644 samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java create mode 100644 samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java create mode 100644 samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java create mode 100644 samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java create mode 100644 samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py create mode 100644 samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py create mode 100644 samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py create mode 100644 samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py create mode 100644 samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py create mode 100644 samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py create mode 100644 samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py create mode 100644 
samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py diff --git a/doc/opencv.bib b/doc/opencv.bib index 1cb3d0b0af..edb7033e8d 100644 --- a/doc/opencv.bib +++ b/doc/opencv.bib @@ -20,6 +20,21 @@ volume = {34}, number = {7} } +@INPROCEEDINGS{Arandjelovic:2012:TTE:2354409.2355123, + author = {Arandjelovic, Relja}, + title = {Three Things Everyone Should Know to Improve Object Retrieval}, + booktitle = {Proceedings of the 2012 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + series = {CVPR '12}, + year = {2012}, + isbn = {978-1-4673-1226-4}, + pages = {2911--2918}, + numpages = {8}, + url = {http://dl.acm.org/citation.cfm?id=2354409.2355123}, + acmid = {2355123}, + publisher = {IEEE Computer Society}, + address = {Washington, DC, USA}, + keywords = {Vectors,Visualization,Kernel,Standards,Support vector machines,Indexes,Euclidean distance}, +} @ARTICLE{BA83, author = {Burt, Peter J and Adelson, Edward H}, title = {A multiresolution spline with application to image mosaics}, @@ -515,6 +530,25 @@ volume = {1}, organization = {IEEE} } +@ARTICLE{Lowe:2004:DIF:993451.996342, + author = {Lowe, David G.}, + title = {Distinctive Image Features from Scale-Invariant Keypoints}, + journal = {Int. J. Comput. 
Vision}, + issue_date = {November 2004}, + volume = {60}, + number = {2}, + month = nov, + year = {2004}, + issn = {0920-5691}, + pages = {91--110}, + numpages = {20}, + url = {https://doi.org/10.1023/B:VISI.0000029664.99615.94}, + doi = {10.1023/B:VISI.0000029664.99615.94}, + acmid = {996342}, + publisher = {Kluwer Academic Publishers}, + address = {Hingham, MA, USA}, + keywords = {image matching, invariant features, object recognition, scale invariance}, +} @INPROCEEDINGS{Lucas81, author = {Lucas, Bruce D and Kanade, Takeo and others}, title = {An iterative image registration technique with an application to stereo vision.}, diff --git a/doc/tutorials/features2d/feature_description/feature_description.markdown b/doc/tutorials/features2d/feature_description/feature_description.markdown index eea5a29c1d..ec3cd0e4c5 100644 --- a/doc/tutorials/features2d/feature_description/feature_description.markdown +++ b/doc/tutorials/features2d/feature_description/feature_description.markdown @@ -10,74 +10,35 @@ In this tutorial you will learn how to: to the keypoints. Specifically: - Use cv::xfeatures2d::SURF and its function cv::xfeatures2d::SURF::compute to perform the required calculations. - - Use a @ref cv::BFMatcher to match the features vector + - Use a @ref cv::DescriptorMatcher to match the features vector - Use the function @ref cv::drawMatches to draw the detected matches. +\warning You need the OpenCV contrib modules to be able to use the SURF features +(alternatives are ORB, KAZE, ... features). + Theory ------ Code ---- -This tutorial code's is shown lines below. 
-@code{.cpp} -#include -#include -#include "opencv2/core.hpp" -#include "opencv2/features2d.hpp" -#include "opencv2/highgui.hpp" -#include "opencv2/xfeatures2d.hpp" - -using namespace cv; -using namespace cv::xfeatures2d; - -void readme(); - -/* @function main */ -int main( int argc, char** argv ) -{ - if( argc != 3 ) - { return -1; } - - Mat img_1 = imread( argv[1], IMREAD_GRAYSCALE ); - Mat img_2 = imread( argv[2], IMREAD_GRAYSCALE ); - - if( !img_1.data || !img_2.data ) - { return -1; } - - //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors - int minHessian = 400; - - Ptr detector = SURF::create(); - detector->setHessianThreshold(minHessian); - - std::vector keypoints_1, keypoints_2; - Mat descriptors_1, descriptors_2; - - detector->detectAndCompute( img_1, Mat(), keypoints_1, descriptors_1 ); - detector->detectAndCompute( img_2, Mat(), keypoints_2, descriptors_2 ); - - //-- Step 2: Matching descriptor vectors with a brute force matcher - BFMatcher matcher(NORM_L2); - std::vector< DMatch > matches; - matcher.match( descriptors_1, descriptors_2, matches ); - - //-- Draw matches - Mat img_matches; - drawMatches( img_1, keypoints_1, img_2, keypoints_2, matches, img_matches ); - - //-- Show detected matches - imshow("Matches", img_matches ); - - waitKey(0); - - return 0; - } - - /* @function readme */ - void readme() - { std::cout << " Usage: ./SURF_descriptor " << std::endl; } -@endcode +@add_toggle_cpp +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp) +@include samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java) +@include samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py) +@include samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py +@end_toggle Explanation ----------- diff --git a/doc/tutorials/features2d/feature_detection/feature_detection.markdown b/doc/tutorials/features2d/feature_detection/feature_detection.markdown index 8b2f423ee2..d0996512ef 100644 --- a/doc/tutorials/features2d/feature_detection/feature_detection.markdown +++ b/doc/tutorials/features2d/feature_detection/feature_detection.markdown @@ -11,67 +11,32 @@ In this tutorial you will learn how to: detection process - Use the function @ref cv::drawKeypoints to draw the detected keypoints +\warning You need the OpenCV contrib modules to be able to use the SURF features +(alternatives are ORB, KAZE, ... features). + Theory ------ Code ---- -This tutorial code's is shown lines below. -@code{.cpp} -#include -#include -#include "opencv2/core.hpp" -#include "opencv2/features2d.hpp" -#include "opencv2/xfeatures2d.hpp" -#include "opencv2/highgui.hpp" - -using namespace cv; -using namespace cv::xfeatures2d; - -void readme(); - -/* @function main */ -int main( int argc, char** argv ) -{ - if( argc != 3 ) - { readme(); return -1; } - - Mat img_1 = imread( argv[1], IMREAD_GRAYSCALE ); - Mat img_2 = imread( argv[2], IMREAD_GRAYSCALE ); - - if( !img_1.data || !img_2.data ) - { std::cout<< " --(!) 
Error reading images " << std::endl; return -1; } - - //-- Step 1: Detect the keypoints using SURF Detector - int minHessian = 400; - - Ptr detector = SURF::create( minHessian ); - - std::vector keypoints_1, keypoints_2; - - detector->detect( img_1, keypoints_1 ); - detector->detect( img_2, keypoints_2 ); - - //-- Draw keypoints - Mat img_keypoints_1; Mat img_keypoints_2; - - drawKeypoints( img_1, keypoints_1, img_keypoints_1, Scalar::all(-1), DrawMatchesFlags::DEFAULT ); - drawKeypoints( img_2, keypoints_2, img_keypoints_2, Scalar::all(-1), DrawMatchesFlags::DEFAULT ); - - //-- Show detected (drawn) keypoints - imshow("Keypoints 1", img_keypoints_1 ); - imshow("Keypoints 2", img_keypoints_2 ); - - waitKey(0); +@add_toggle_cpp +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp) +@include samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp +@end_toggle - return 0; - } +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java) +@include samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java +@end_toggle - /* @function readme */ - void readme() - { std::cout << " Usage: ./SURF_detector " << std::endl; } -@endcode +@add_toggle_python +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py) +@include samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py +@end_toggle Explanation ----------- @@ -79,10 +44,10 @@ Explanation Result ------ --# Here is the result of the feature detection applied to the first image: +-# Here is the result of the feature detection applied to the `box.png` image: ![](images/Feature_Detection_Result_a.jpg) --# And here is the result for the second image: +-# And here is the result for the `box_in_scene.png` image: ![](images/Feature_Detection_Result_b.jpg) diff --git a/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown b/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown index 8243b430a6..e7f865c3ce 100644 --- a/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown +++ b/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown @@ -9,114 +9,57 @@ In this tutorial you will learn how to: - Use the @ref cv::FlannBasedMatcher interface in order to perform a quick and efficient matching by using the @ref flann module +\warning You need the OpenCV contrib modules to be able to use the SURF features +(alternatives are ORB, KAZE, ... features). + Theory ------ +Classical feature descriptors (SIFT, SURF, ...) are usually compared and matched using the Euclidean distance (or L2-norm). +Since SIFT and SURF descriptors represent the histogram of oriented gradient (of the Haar wavelet response for SURF) +in a neighborhood, alternatives of the Euclidean distance are histogram-based metrics (\f$ \chi^{2} \f$, Earth Mover’s Distance (EMD), ...). + +Arandjelovic et al. 
proposed in @cite Arandjelovic:2012:TTE:2354409.2355123 to extend to the RootSIFT descriptor: +> a square root (Hellinger) kernel instead of the standard Euclidean distance to measure the similarity between SIFT descriptors +> leads to a dramatic performance boost in all stages of the pipeline. + +Binary descriptors (ORB, BRISK, ...) are matched using the Hamming distance. +This distance is equivalent to counting the number of different elements for binary strings (population count after applying an XOR operation): +\f[ d_{hamming} \left ( a,b \right ) = \sum_{i=0}^{n-1} \left ( a_i \oplus b_i \right ) \f] + +To filter the matches, Lowe proposed in @cite Lowe:2004:DIF:993451.996342 to use a distance ratio test to try to eliminate false matches. +The distance ratio between the two nearest matches of a considered keypoint is computed and it is a good match when this value is below +a threshold. Indeed, this ratio helps to discriminate between ambiguous matches (distance ratio between the two nearest neighbors is +close to one) and well discriminated matches. The figure below from the SIFT paper illustrates the probability that a match is correct +based on the nearest-neighbor distance ratio test. + +![](images/Feature_FlannMatcher_Lowe_ratio_test.png) + +Alternative or additional filtering tests are: +- cross check test (good match \f$ \left( f_a, f_b \right) \f$ if feature \f$ f_b \f$ is the best match for \f$ f_a \f$ in \f$ I_b \f$ + and feature \f$ f_a \f$ is the best match for \f$ f_b \f$ in \f$ I_a \f$) +- geometric test (eliminate matches that do not fit to a geometric model, e.g. RANSAC or robust homography for planar objects) + Code ---- -This tutorial code's is shown lines below. -@code{.cpp} -/* - * @file SURF_FlannMatcher - * @brief SURF detector + descriptor + FLANN Matcher - * @author A. 
Huaman - */ - -#include -#include -#include -#include -#include "opencv2/core.hpp" -#include "opencv2/features2d.hpp" -#include "opencv2/imgcodecs.hpp" -#include "opencv2/highgui.hpp" -#include "opencv2/xfeatures2d.hpp" - -using namespace std; -using namespace cv; -using namespace cv::xfeatures2d; - -void readme(); - -/* - * @function main - * @brief Main function - */ -int main( int argc, char** argv ) -{ - if( argc != 3 ) - { readme(); return -1; } - - Mat img_1 = imread( argv[1], IMREAD_GRAYSCALE ); - Mat img_2 = imread( argv[2], IMREAD_GRAYSCALE ); - - if( !img_1.data || !img_2.data ) - { std::cout<< " --(!) Error reading images " << std::endl; return -1; } - - //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors - int minHessian = 400; - - Ptr detector = SURF::create(); - detector->setHessianThreshold(minHessian); - - std::vector keypoints_1, keypoints_2; - Mat descriptors_1, descriptors_2; - - detector->detectAndCompute( img_1, Mat(), keypoints_1, descriptors_1 ); - detector->detectAndCompute( img_2, Mat(), keypoints_2, descriptors_2 ); - - //-- Step 2: Matching descriptor vectors using FLANN matcher - FlannBasedMatcher matcher; - std::vector< DMatch > matches; - matcher.match( descriptors_1, descriptors_2, matches ); - - double max_dist = 0; double min_dist = 100; - - //-- Quick calculation of max and min distances between keypoints - for( int i = 0; i < descriptors_1.rows; i++ ) - { double dist = matches[i].distance; - if( dist < min_dist ) min_dist = dist; - if( dist > max_dist ) max_dist = dist; - } - - printf("-- Max dist : %f \n", max_dist ); - printf("-- Min dist : %f \n", min_dist ); - - //-- Draw only "good" matches (i.e. whose distance is less than 2*min_dist, - //-- or a small arbitrary value ( 0.02 ) in the event that min_dist is very - //-- small) - //-- PS.- radiusMatch can also be used here. 
- std::vector< DMatch > good_matches; - - for( int i = 0; i < descriptors_1.rows; i++ ) - { if( matches[i].distance <= max(2*min_dist, 0.02) ) - { good_matches.push_back( matches[i]); } - } - - //-- Draw only "good" matches - Mat img_matches; - drawMatches( img_1, keypoints_1, img_2, keypoints_2, - good_matches, img_matches, Scalar::all(-1), Scalar::all(-1), - vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS ); - - //-- Show detected matches - imshow( "Good Matches", img_matches ); - - for( int i = 0; i < (int)good_matches.size(); i++ ) - { printf( "-- Good Match [%d] Keypoint 1: %d -- Keypoint 2: %d \n", i, good_matches[i].queryIdx, good_matches[i].trainIdx ); } - - waitKey(0); - - return 0; -} - -/* - * @function readme - */ -void readme() -{ std::cout << " Usage: ./SURF_FlannMatcher " << std::endl; } -@endcode +@add_toggle_cpp +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp) +@include samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java) +@include samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py) +@include samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py +@end_toggle Explanation ----------- @@ -124,10 +67,6 @@ Explanation Result ------ --# Here is the result of the feature detection applied to the first image: - - ![](images/Featur_FlannMatcher_Result.jpg) - --# Additionally, we get as console output the keypoints filtered: +- Here is the result of the SURF feature matching using the distance ratio test: - ![](images/Feature_FlannMatcher_Keypoints_Result.jpg) + ![](images/Feature_FlannMatcher_Result_ratio_test.jpg) diff --git a/doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Lowe_ratio_test.png b/doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Lowe_ratio_test.png new file mode 100644 index 0000000000000000000000000000000000000000..f56a640b6798d2c0134c0448d39139eb40618011 GIT binary patch literal 54032 zcmce;by!qi8#N4~h)5%%l+xWLAdN_efP{o}hjf=nN_UrZ3DS*pmvncGboaXl^%u|c zT<`b(_04s`49uJ}`|Q2XzSq6(wYHy()N5o!97Grx7-Ufqp*JuvcZy(OVAB!qfd2$i z8q)y0li>mb$uTRtDzQJ8%s= zFfh+yM1}a}?Gt_`+Nol$J!?-kVe=(=F84}E7VVy*!e+M0hhTrKX{WNc#Wp{5*A^01+pmJ6pD zW6o86!y?ir1Y|V1fnP4M>$A-79{4>@gugjms(6cIy+aC)8Q{}wME4FP@N{#KYIt@f zQv_?f__dwCM3YClPpY_Z2(3d4H5mWvE@4qS%zZPH#$gE`NjtNfO(bMkaI||0?kOS~ zx7wuPd|*?B`VOvv`+oA(q}y(+8TQ;6(r`}c;8H6-{M=Jp>h)xgbCE~&IU2!gZ`;E! z$u{$hi@+(|;qUg~8vOJXyfP=9`W(M%^>N+D){eeM2lrYO5d! 
zSGn08^X#RKc20k4qjiNmJJ2xwITiaZI2_H>EM-s-0r8XsP{5^-_i%Jp`3GP1Dr^!B#OAMEcVAtCJ_959Y}dfwsYuKWI7 zK%y)=TjcfYtjfwKa##p=Ra8_+Nl6uq-oA~Jp@~9QbEidbEVm^HMFgz@5U4n*=PD(^1g;|y;w56?WC{_fmrN(Y&^H_;2TvZ00egWOQfH@wgM*-fr)@>`n^zYnRaI5srIeP|q6gf4`Ad`s z6v21~(-G>&25q>O3^kY6pG4tn3GXvgn?_ja#ANn$EOD&h&Lr!%+RQ#8RbVjKnXO}G zW!?M9z{({Rp6{Guv2!h*}_UdaBHz{tA#K_N~k5YmV;z+U1dLOnHtzspbOlEXKp#!VCRCi|h zw0(40&BBZa&qj>RGZ9;#N?$8|c+sRev}~s86>>NuxdZ3QUv+or8N*Pdi`$w%^WeR^ z7#Xv3CEGvSQOwM?uq1K8)v`1+8ySGdfaCecAJB(9Cnm4UdCS_t26Xi3sHoDIWl1pn zgwp8Evp73~S*^*4yby06>k>NOCQR50hHEQT>7u(c{%RV%OS;d9$^(tWtLX>@^TBMTL(}z2iUwVtA%}i+DOepjr-uws* z479PfepJxv@$DOPuQ|7wnORU!kTu!y&K&Ra=k9$%;9eLj(io04c>jKPwHKEYQ<-LT zs=|CjLI|PG`E01I&AYE9Lp&M>7k4=Jh52ml@z&IEY<6lY|83`)%~QJ@bFJ7583_!e z`}EIcw5Y1AXTPEwi*;3_LY=mHnV zThw%Pnass@JG0;(v?k-^;$k%KdCCnt6vK7e;qX=N1^?XEA+4o_@h9#q1*-vRfs79|fp zEPQEM*}HNRQGG2{)#*4^<8=uMDJk`0Eimg|y?XUculIITC1aUBL7TX}y)C6rBw(;T zU3GSLW=*D|q%?4uSe=tY*(>J|BSGfVoXU)zjR4}oJunl<2T1G}xy$4%o z&OE(7XJ~f`Lq*mlpc5K~54I8(&mFfBJV}6C%ZQI1Mh3sN8QJ{>Q_Aw@LOs z`|e_E04yB5^XaZ;ZyalPG(9%L-7?1)m9R$wkRt2>B^Y*hI19KJGSRpx9E_~2BTY>n zeL_6R=4NKU=*haWxOZ-IUZbiBbazYdMbo~rd|X=xc68mhg0 zO!7RQc6WsRcNc=jUYzWloSbMBYrTSu7Tib1d4z?fK+O$f##iT#3q$d{9pK52vl138 zbG*C_4hO5fN!%CPGc{#bnK0m_01u?VOv9iwBG1DFQ8o(U2T?Mxu+-W>M*9-E&;`oN z%gZG1vZts6Ao0%=R`=o}prF`VTIRkZ0vG-t1O8)6)oZCe_!*0V%nU{oayIrlJPacA z8*aboeHw}x9|^_=1oBQrWt0{jv_i9E#nNU6PR86fA>?5@ecfueWP-3`^$A1Hd9Z5K z`K8hOlFFgrxSYAYn$#fgh^v%e_f>L#dwIKpsD#Al;NYTzEJT>oY7V#S+W7eR!a_Pz zzw7I3u0Z96Rcz&iT>1L0hg0nRv%?sGmT zR{oT|cLave3(1i_W--j>!YCzs|E;Q8^Qth(&lFx)4zhUHb@NvRFn8G9@Bf-TV5B`N zD=Rtil$20z$AwS(HQou~C*9%M@}Gg8h9<+K&#AZ7%01~}?@CwLCla|{&Q-&nvmzn3 zKf0X&uy}Irev<@p&m+aiCFA6uO(3;Df{q^!hgZs1natF@?u%OYoUHUff5e9rH)Ohg z?d)gHn-yBUC*G_fAmCkz z&Vxk1&d$z*wf-#T;_PgLp)aD=Wd1^FAC}rQac;oG!OBymyE)w>=0_ub9YkVItqh`= zd@;sd*{RKih0yr;?zMvlo)kq)vH51K02gURie-%CMMXUp5E)b{(bLh9&&TllV#dnM zEJqZ&Yya|bKxAwzCHS4wpTy0~&JOO^74edi5~{bMBJy=@Aij8cy>vN`NN_?h;|t_t zANT{Am|=fGCjPQwmc#W_a>u18dnK2JHM&p{ODik4)1J+dJamB?+pX`4_wIO9RIqPH 
z_?M~G*nV-$pdIY$5|fh)?-No}QoIq+w{_N~52wFPa_Pg&zMIR`m;&YOqA()t$yuj;g`glJe zAfQu>jvTVk>;rwdxwIrFFE2C-y7C>k=+*Yd+j*ivI$r@r}Vy!D{eTm}I(&Uh{gEbI4lwDk0zz`pvA3=Dp zPn-MucfcgjIXXI$6c;BpZe78qQxyTPAmyf0U32dIq^$cn_?dnAz3ti6 z!*C>fK@`0=CRC1R#;=ol!(bM$?;q{%GJr|2V&>}i`Sa(mUl}+twY9Ysi_|6cwcfl5 zi;l)l^aJn>H1!L!EX5;7N5@-O+v110AraBNVzt`S+1;Jayf^VpAD~!kGWdJs`3$=n8rKz~}Sc7UcFL@VUT%TXx|E=VW^J{Co-h>6g$Z%{#{TU;8V=mY|*xv{kI% zxY(&5*(kU>z`gkW6O+AF>tQ-o4$eG6j=p-kO5$+Q95fMCA2l$IS8B?!zwH3!K#Tt*IyFa{b$45}4lFh!!y5?&U)Pzf#|}Kk8W79~c-o zPcVULV!XErU+p1`ns=06=Om2%%;MY7h}lXtY1Lfv>sq`C1VR$$Vz)uhg0bNaGrqW} z7%D@<8R6J~&z`q|8yc2{OlKOKeNd5+jgt~D?={}Gvi?qGxm?ieFJyPQ9ZxUS#CkG5 z1*t4sCT@&n><=KLER!SJW|f(HJ#cEc+CRBF55M~>@H5C#v+n}zc6fN`Nal$vLqCA* zO!^f`#SSuD@!E;FkV+Wt??qOn%7a?5@Gj_ZX6PdO&oDI&LuRxB@A>g#VnV`ZEVyOs z8f_3^5a&7X!3?Ar%#TruNK20m4--p=%IkxFZr+>z7aF11fd}_~5FW>y{fC+Y#0H~+7oC*Z*TRi_gp(zzJg;$QI@5DfnC5O|Q{(bCdBL_~ao)b%iH z6aaz$YV!tsT6#J@K0ag)@i}-K{?{eJ7|5_FvAd&%iDgi2$U_Im;$HwN^$YHJsiE9e zzo^6=@TIpGvId4W!5rqkMLnzpg+sjuScQ#^jB-iP`~EA+KhT|a7b2php!ix*p(uJA z=MTU&y`P1FVWGg;uZ2~BJ@_x|BGZPP!0lvvvOSY4_6IopbJok~%{GF&TjBHu_t%{N zCo&Lt$}aQf&En6W`ME;SUReD#G~hbH;J~o|yGIuT%$_qbS^mhDrIPqx-*mcI>rr7o zH+ob32@8zW=DliIch5|4juiAkm>Fc}h?r1(qT*Huql(5sh#jQi*c%=HBJj|9 zM^oS`ST$n*aJp&=J+`C8p{LZZPuTxmlQ0Bc=NX;J<%R5H4>SQJ`0&mVMxz;<-P1wt zcOTFNN>H)cw;U2AL#UNi@dc^d`L%@)%Lj;_N$#3oRc$$1u8{W)SSFR3I!24RQ4TE| z$GcvvX_l1s{%{NG*&2_z_V*Ii+?!0?GCp0l?e6tC0{Kn>JZ6raj@R8`>aIzVm z>IMVx7X~>cJdIvE{K-1S#c+MKbailUd$Xs5(aO7@)_8Pc=0i?TLjDM2C|S_IWTvVl<#{#>e8h z+U>klbc)`{;tfHJ>TunHxEyZR)Ow0V_~zBVA-y)%GeOoldCI=(l=U`nLs}&2tGRpy z<=W7ro8>(us?XJ{TUPWQZwId;2M4CzRp}}~0H&jRt9d)G3LMj4KdM|B3;!JZPfjSe z*RJgxDev%863bzK>l7~v7rOP(OxPw5;JW&kC{WX|2VF1pQG zbUF!b7dZ5pO_i(1Svi!j3HWNhv_2Y4tm_{2$L#Na`qHMUM{0(E4#HQ?>*(Uj<7grz z=^a6_co6Uzo?fNFuSYpvQ#e0ZB&7Qs%n>fx%sIB>2xl_wOm-mge*XR zyzs_TZjdN#GUbOf3Srv(QP5scTO6L*H(NNjD%I7ZM_aDf-Q3|d@7RamTOH9Pw3}8d ze(W&90lg`ss`}DlUuWcZ9AL3+`S9$G65Ez(_bf$B8=EO|$>OZ8qT>k-BC52}-~^NI 
zN&ZfPiRsS^DV~t`lf9!d3(^g;H6FRd4G!qCX{Iw$gIRdZ<`;Ilq<8DY6t{1Xhf8;Y zncDY96?md*>^4}ulKE5&Du9x ziD>^ZdrTiQ+>_=YI}~bqZa0^U;8{vHhraP=^wg;Y2nqy-5o#+RI2O3t0rC|?}p0D8g2jn#Szyt&dB>2&Xyon z_wVsuel3fh3y8jMm}=intXNe+7kG28hTgSKd$g+p|E-4W)8ow{%e;BX;jwA?jIe77 zkQx;g(R+I+NDFdhHqOqvc1O|XLK8Z@WxjObsCx6~WAp6+8Ohkb1oURTTjh*mDw@QG zmqF@yGf`xZx%GEm*iPy%kvORp&Pby^#0moVTT@TZ+o~ZiEHo7T@#D8UnqH{bDvU_g zPIQ2;+}~HJw{D7PvUL=ha@ex+P^H7>QJMCha&<__9?}-0mtvZKaZNxID*k=UPM4TX z@qpKw%RylRP);tBT<-d8CrL7N^oLgL%SYpn)sBt2hoplHGZ)SkJzu!Y@uBE9&IXql z)lfLrR!J#jKTA@zH&41()Lw3v>^9D!-}ZFV+3kgz zFg{VVSCQ4CjS4_;V}HCo0uRiJ&diQ3*0`qOA21Fd>#b;DQg(LVB~iss4pNZsB#3qmheKcVChNu; zx*|JQEDgI{lHtaPux`pn6UC$H0-rL&H#g?4{R2d5XAhQd|aqflk42oVt5)^PNd^i8^e9t(krMPN9Z+ue?7zQ+@(LV>?-TYn;@!C z5gwOw9U~(Z8JU!#qMiA93xKXUtXA5=*psjPeNr zha&z)gHuUxD*DFI$^Et$2~-yj0VQ_@t~!($brA%H5I#wNN*x{@jr#;bDXpnwxnCp} z2f4sI@o;_-T?EEJRWf^n)8wgIn!_s})Ey!LA`D%WyRsYdnPA09O+aWFFdXhnaJH4; zcEKm1p^c=NbjcCE$w_*vJ$hnd(|8^4AvoP8mGpA5-aCAEYr4v6tWeE=OfEbzkv6;& z6ifnIhsMU%_Kescv@|w8CnNK=RCxdXy?}r~Je%p_(vqo(36JyXdu?q@Y;1jFV_aNZ zV`Jls>nkTNEKyKH3;w14O;c#W5))oLpK}O%3kO zon_0jL@qnwKpbdJcYSri>wIc4-{^r#@bWnXoLi&b+0e|)6_Dv5DV(cwvfG_+vfG}1 z%4rR4$4tIDUGTT{iPP4a}S-wO`|X~UbcOOoa_ z;zsQDmHB;Kuc~dXZO^?J>Bk)Fd zTJybq_eQcz9jLV77Du28C)=Dv>jkm4YH$E=vlem|#kQ&H>X4{1)^np4+H=nDoot&JE8nfM+j8w$Mjo+UvH+@&KZ(r+QCK=C$jt*Am$AH@?cvrEyXRlLM zUJjnX-u`YJdAi{9 z%>HGP2+8JT88ygihzF2aY+nQg9UItLd_{A~@<0m{hWB}pKZAEfZTe*{L;Rif^$Zsy z(+D2|q3mRqiQdDd$NsbZBp44qHZg$W4J#|HhDee#vYe!Z1WCgec|9krJ_{1dZM>={ zb3>*V6)FfL=f=wA3dZ8LMmj=tgUw!kXaX6j3m+`4#zswqI1JGVTTIMMbE2P}T!$l- zX%c$r6u-tU62~hU5*y@WZDcal&Y=|<-Z@IwcX}*~*dA{=1YJ*T#5{URGK0j+vdvN)4P1qi1e2vm?dU!A923 zF=u9tQQnlsiWg+NX@X+LCPUQ>D6c$6^KD&((sswitj!>PsM0y4qeNZ?D`o7TIqxTv zuBb%HmjqH5DJv_Fj*gnJ)pS{Ug@AGcV2aKt_9Kg=M?jjY!Cs{b2+_bsn(u8W-A2(|2#fui6&0jiaDiH`bYT{?_UwT?TXt9m^IYA5gGPVin6i6evUi6o| zVS=wgWq*6PbmJdHne9zNf21Z4eb^7A2e|b<<2i`XInA7#Ic&HTL5xE^u^z6k&`mvo z;N|2~w4FpNtUXknCb!OBNUj`p7uA;dczw2Ipo7l;=vA_6#fFHChiiOj836*}7hdbG 
zm?(2w+VS)GbcD_l^($_duaFL8*jI=GXdkD}=o3p3!GoEHY6q)j8;Wgf1|o*2DpM``+oa{mEJ-&Osv&k`&{P!8hO~{4}n2(cXEU%+$*RydSaugzmL!E2*g@sw&gG4DqpuvoGtsI!8)IdZ7N9uvT>y`n63cRHl9eY}y2_|0 zhOxfBo=W-$sEqFKzd8Hr9@dBz*npvP=9e03_=cD(>sZ*off0K@U0^A|dWv&Ao+?Sf z2W|iG(D`s(Rwa&TrofSjnR$44*pR2ezbj<5%||$k7?bOM{Hj==;_aUu7(oVXPn7=9Dw>snKw>i2eOb5Z)Wp(z-EH-+Y1cM*A&m1q zmIN#GFmjH@P=qxid3p;kLTGgiO5WhH4}ch;Ugx+V0%b57Z!2Oz;P9&X0~|a&Jh$T^ zIOY#?b6YJyZb6QSh&Y6dp$<=5K`D}YN*D@*6f?f#h3{<*&qAZebQxkK3WH=Jg&*64 zw_Ice@)|LXVso`j?9MTz?j3wCZFxs~fDgBCE zvc7zy!i|7Td`jYKtz$Z<>=G0G<|0je5;FnRxJLe)bnuEeEs8B>4VlrOj@e068M)pO412skFRN4V zOc=Gq%O$;9k|R2Ex;0*bJ)2=OuA;y0uF9(uMPaxPaG+d2`Bf7x^Oj{&v2pi=--JgK z@HITY+9B^M5*1pWja&O!ZVJiZh;AqLdeSDS_-tmP#w6p#-ep3Q9CpvoA6pA0C6LAS zqqDR1VszHTF7Lnj1EsA(Eje$Gi{d9VJ{mc9s-9DerwqM?8B5{A!turJ=;&x<`Gf{# z9-({|PBv~f%J6{Ad&PpqmT?yU{>16g?V9qSVdn)an(SN{8TbEU1wvkIOinVYsTh7! z#-W{bt5~*<0Q85Y%i3|wDJ1U0R=gpOwfb?&BHEd&Rl<2b(4T z$zL%@ApgxL7P()TfGM_p!{GP9fTj|V9t7>%fBC9#C z!Cq{3Sbu+pkuS+o&$Q@$ZJ}q*(>2mO2};Ei-3>4LSA1xBjD0HD-!oUz*7R~>VcFd6yqG+MIOK>-+Y&-$-ug8-HolE3x<9I;VeuMN5>6EOxbwMue!o^JK>$zjJ}j zn0+_8TbKS*+DyM&mA<7NL=VTLFzM4lrR2$Jt~onMbcAL*dV8__gs7frl(C?(e|_KN zSy}l)0$Qs59M}IDiA4lw-3F1OP(TK6H=I_`?l1?hYIksvZTh$yj#?w_`N4n;@^*zA zIkg@79p&=?tD2WwU*nH7>gU!Rt#ju66v-v)`i5;a+^+YViZ#x0KG3|R&+OLOxPZ3| z@xJO=l1@_Jb&6A?+--#U!Yc}-65mi=+ZT3wRO_T(B04&l9-wIpA3URf+_jPx64<9Q znV7XxgL}BTkoq){swt3-fwJhYz|_*`sj<`sZsM zIfCDbZ)RB9!c5sLx0;r<~9Nniz2_kG0-gG+ytmiU>AD4wt%AXw+4fq(!6T zLA()iRHL@oJH5t^eHzT~y%MUet~XZ24Z2h>+vO}e`>~F`K1h7#s%@yLsk@t+ z7Ebq;sPNT~SiB&QKuwC4wjeAFL&DD@@y=}(*u%%iCpPwJq8}*w0H~=kE^=y-iq)J_ zv&KXZ9z`qS*b+}ovV2Mt$9S+iu&m)Su*siV;JKla)xff8S+U*-R)j0QAA#U1)RM@i zCf5KFS^0X&!_|)eK?Q7HDCOY3|K@nT=iywszPC8~lg_xAB&|K*ofYThc`l98u6_6N zMvaCJl`miibQS8!v1jA;n;^woGi%i6%Lz~Qvk$Hote@KWs$$gl`HdOceErfTrSF8Q z#h6ghA_4_;_I5!}nG!#ItUXp(z4kDH3(f;^colNs6m(A~id%nhIN8*`FFrj33U1Z);9O$@=$-W9!y*7E zK&BB<0mzf@-@oThUtgWfAtNLA_VfsUegb`adU|?y*TOQMs)Cq^sACF*AuMz1S>lm{ zpA*c^H{O&F5D~k&^M0@(e)rrxx> 
zo>>#;XlvMS9G!qR)(cH=Zu;t;Kq*_GmOy}uRW7{aYO)7JF1LE-A;6#2@(%w?4W-B@S^qknBPD1|JIt^|4jc)|3A}z8RkuL9~r5; zTI;2T`s&AT);A!}H4Srw2Dimo&!vw_alX~-=fNG%A>l2u+3qh79P`-RyJr zMpj{k&hwolB^rK5HS(gE(S?R5-clE~CZY@qB7tA2U9`J(7;^K;PowpY5}n@;q-r#n zB{n4TX!dlMsqf^X2wW|9>`^y7vUEVM364XaxMH#%KV!x&nscyU9{W~f7c_pe++~7$ zuOSR1mH1q?G!9Mx{b{#-&T*aU=`lH6A6z!Iva(XVeMn17v$I#orVE3PFb5eWctC6N z6~(2cc#K+i9j3avau>HiNm*{^z7)>)S`Sbxe5b6uU@Z3jHwa20tnpCcTb|~yWUSAa zIyNsu%e`7KHOU)|`uU^7S2fM&0VbiBm|*kL@@iqhMD}*HqaIsD`H${kryEi4$**Vv zCFAJvC9+uI$Z3zP_5;3}7~(1#bjGH#aw8Vq%5B&wk$X@PNkY;-Ug&Q5kv*30OXUI9oVY z;!bfFY%J)5;Bb9WcCOw%B=X~I@*VI8fzlDuNdna@;No;7kN?z z>l_UVV8h#D;I9s*20Ms6Dg1e=%!Wf#?p(1ELohP_UlJ|LdEWSDE#GfpVfznAx{$GT0)EacI##4gYaqHy|ykd`xNDr#Hd?lL46Nt~ffVN<^ z+Qt|QCFmo#Hb5u@Z6vgi{E<8bjv#kn1XXg`ZvM!zS}SxBrssKL%6&8a@!Rm!kFp{l z)cCsAlWWW0?O6I_h^aK$QO&%qSgv7w?0{R@ZugiOm$Odg=YZ_$!isdBIiq757m%qhm-QYbLPJ+wvuPQN9e0N@hxkl{Yp^9TIE{4^CKN^Gs_2! zsvnG+uQf2$@J++7YlM;-oQuLA6NC!N<3w;)l-HQQaD{A(?#rdCKp>kNamYMPaW)oH z*_`Fp8V`&br1%{j&w&i7tgLL06k2db;z(kaf(yivmhOKYOpuW=qUJjm1@wN0ht_dw z*acp?--UC9O!;s43VDevm7RLo=UgF>&K(q|_q*z7mv+u@hXStof%5{%C0w zlq4*X40h|IDh;E__k7cqzkbz8^$66?daDu{rAQEQ-Z^4<;ArL9Zq z=+x)|5)x1p=4ED*nfd|duei9Fk%8g)&N+w#=jZCx)(g%u5UImn!OnB1Y$%c)A zLT9BJ7=m$c)5MF?RtAGPqmQGs{u$;oE4aoh0x{L2Gv{wi^N=LesIz$9%6C(v0$}wh z_5DdKr(U8{?1(0PgV+xq^XDWaEuEbYjBjghAQj?IPXx(}2Dwo(`&autEi%j<_HJm& z8&q4NdNfc^m$|6*NPit%ZZLS~@1x`_;DA(wpP%2F>@hZWgY#KfpOBvr1{xZlJ`q0} z>NB1%Q2l5UiZ7sE9f%%}SrX_0nK&-ca7@+M(F48tpS2PZDg=wvsPHat34X>q6iX4_ z`R2`=bqNv#BA}4#6Y`SL(jw~h1H3iMjYswv@&JS3ZONX&d_K>;<3W|h<&bgxbc^Aa zEGJY<|KKAbv+*sb!Fwywp4vEe_>3uaZ;z1Cjd0A|+n36WM*LTWn3+|wT7mXfTwI(J zGgcxq607y4VE%wxISlwu+zsCk2s}J0!Ox#RKXd((9i;3D?$Y;foTNY7WNnmkKAZ2W z8*@%dNYqk0D#$%tunm6JzY=K&H|<(8>ozqTr-I^v+ZX|aUH6SXzya}@HCgUS%pG%c zb9Q$2>03FWvmB6%-P=ArBGF!rOnI$%1t(}mYr5TFbdb21`%aWss09K4?Y&I>sJ)Fz zXYo(GE*5YxKtp${oujJu09+ztu3DAA3Um>Ia&axG*A-!=ZIV{3Ag}&7`!KUoXQm!U z>B!&J)7MwXB}-i`FtE#@uqgG^2h|Kl4vhS*&>^B9x1+n8nu4Oy@kjsznn+fAv<1D` 
z7DVjMtTUZxhyP0#FSJsS)~xLXe^#N7vTigso0@zvNnI@9-^2s=`+Ptq!e51JFq*F{ zCWct)QAF^+u1y)3ACetGJVO-4Y=>^ zjg2bjGfS@l-kZyhMTmDj0r9yYm80~EXb^-NL#e~PJ)p}a3&-cM^y(Axd>%x=t>V)T zl}FJ<-^!z~sDY}A+hQJQGV;{H)S`SFD7*Nd$R5Y>U4i=}-GmL{k$?1R5Heju!uSVUeou>fTPYO;4**MaIR)D^7%|zNs(QS~)nIskRYB02MPzUz>_!03OK~8S?=TDRS_wPRrctuA!CrlR| z9~1N33-CJ2mQ^`9hDTzB1?qVw+9oDF?-@P^26mP?I5^na+T!5gfR3#wE&VxFL0?tw zsLK>f`>s46`cWMnAm9gzGAc9T#{qzGl9ZGL3_f7{PkPw_=zM{;#s$0=|5Kngdiq>+ z7w2{1%H@T?JqWo}f=IpcfUtnT_d2H&pgr&CsB}D{O7sJzC2V^2FU$aD9RjHx@G+2t zfn^x1kkQ>h)+c1O2(;=zNw+TX(r#N7^jV@Gpuv_7&Md2@J3A?fh&;W8fLsVDN8Fz?QYLC@&58}4gaBqs z4v3f@S&$L???)r#^*8l~~ zkR5v+>gl}|43YGYWx_H2lM3~WtzI;|i_1fE^Z^Ztt7duYS4co?9D(phbm}0%Z(*?u z3SNdW0>1MxcWmHh_N@|n8K}y zC3NDkPr{j^fj<)_aX)lNK_nXJ64hAz5I*oF z1?&O|)3UQD&aX&zD)#GtTFcyb7f1m`h}tRBlhR&5dXuQrgklea?X)684BHI#R{ z)d9{QoPl)&NFNCiK9Q5j(*8kT|7d$mTpSrG>B`y~b1chM{GZSXgvgZWzjWK~{cBkn z8G{aVP-WPyWC|?Ew=oDv>E9a}8EI=X8xE2DYQQgn*k2v9d**CscLJ1=zux_&(|!+x zTelK+1~xVnB&06~Sr7i^Aa5fGbj1g{(jxUb;7~FU|D(S8N23}+QM124xbK!=1HvnS zCjJUz0BX9O0WLoD@8;_0UQDrg}+6Ax19w}4F^=BTwGlL>oBUwG-dJ zdzK5`HRGG%V^1;i?fE*zKD%$o`Hh{d5&6$WQLO>vk9ZUfI^w5NQX&E$Nd!ET}ysBauK}ucR$KYSy)(rY>AbH#XwvLj1sanoIQH`Wz9uLb7|Ws z@;^Qql#!teB*BfyER;}5BJeZ(>Fr2nPz3h!x@!R^iTLkv7zs;EoB`!Ko@7+B6w%+F z2Pmfn%o2QMUS3|-8$+!>emt7y3%^rO+2~%oJF)$g06PZcKcv|t+>YJW!H++eL?)OW z1krA#_sz)t>25w+v&PoePkk+*S_PIs#}eQlVRJe*xwRumNfQC137~D>jtZa*&@r(aGj56V6qBg+V*s>8GBb5vf={S z--lwoQB_Tdiqg~9S5#3!g!aZa1V-5~i#eyQgb#BrON&1;-W9>7>*rst*_tp$Q9xaY z{p#Q9;FjiiXL*avPo7DWQC=%aJMX_^=;_s-`j{3674P5PEMR59A_^6;{W;#GRXiW# z2{oKN2_Z{JoGm!83$UFNCqtq&nN((dZ5WI~b2Z!a!PCyG>!w!7gUgk>Gsb~*eNf@1 zG31?i{6*2E^K1S&!a!~onMAJ>+cWqMcW03 zG3H+GHaIVQUxA>`k+?d#ZDAk07SA$!C~NwW{(no#&|5P}&97tX35TslDj#c{7k~Wg z_LlF+l#DM_t@1>>!#?oC-HdMp%~0_wDURl430M(l{JYC0KaX3RWOQGMV+$z6kq2Fb zd@nIHe|KE$@y7~aTh>js!mG-y&1>GeS2n}a@PYzZc79L2Ptd8iHHihr(Gfb>Ukr1c z)z=CWDKpoLdz?7qT`tjzx?Tv#C{YX#jI6k*H~ttN6n^#T-hNm79zAWWOU}%**U{E* zR*NKwLV-Eu?BW8deh0n`pvx3SsB^h^tN~n%02?(Yz9|L{0CMRh=NUhySejd3nCJ7& 
zs3&-M`Pp47mL))=FEi@y4&(V8WZ}6#fWp~dfh=612*5A@TzM)|V|#o1$a6r7g^D~P zAOH&e+%aEYUw;Lf$BIRvp`re{A|yn{pd{+xa(!9F_nbn5=n-Px*!udqe_3g1sbip@ zpRByNm>BRZto9{#uULZDo}QjS<_{DSiXbUbzjufK=o~OZ{$;>m;RvFBMm|tf>;jcK zsL&rBMT$UEXCQva6{bTMXDMZ&)KJO{3d&RxBHe=^g*Pue=h#br5Q52chcPBH6JCTD zFZN(K5B37pUWxj|!Mk}OA}WgDyr<#jIsmQO?S`j5x1azY7dL<_Lu+Nv{)=WNb7<*$ zDJd{4?$gj{vy6%O7$CAhKmuLcT&&w+>SY@Rp+A z-CG|d_Iysj>q-;e*>Mj3$dMNt_3s8+kuSh*@C^JvRHOq34ej~kfX?&fPE0=`fl;dN zS46UUdYST6AUx$L(U4J3JPQGOlPsvy04w@FJUrqjff634y6iwk2m$HdC*W(U`Qg|&mO*QRq)1@ zUV&j9pno7=kgrbGcuq*TFk9zjO$J;is&$T$eJvn&1a9An2FzvMc=h70u2+IJiH8Qm$j_7lNzAqy}2=rInRew=Tjt82IH@ zbE)fcnw0Z)3kOir9TX_j#C!ePsg&!5l5hPmAA%_H$L71EWP&{h zS@uSG8ot+qL7!{o8*GB=)c-&_87x9VLMbV!WmBLxyInRI?0Y5RR_bckFTyNY+kZ%j zXE@wC5!j1V>TJKvwDC`malOTpvQT2F!o?;eaj_d-ll@TPx1VuEd3j2`L|?4dnVf^EBq&*0|e9Gn2tgf%F`~ zR^THI=ak3)i1`AoqHqY)e8X_nu#v=w(=W7pG3)l;1toFsWn~Q>j3gNoApWS6E4w=l z^Bc^hV4R=q}aIdPmIN10xB*p^INZ z@t3>U=|)yE6%QqVA<(N0R24?+LCm_NW2N*ar!>?C?ys|ZoQEeQ^sHEdwXQq*A12P< z5HOn;NLiLC)4G9Q?FW$fKMBEZ;I6XdaG(=#7(I~CoUIz^{@BGMYVniYG=5exu_g<6 zL4G&f;~XTyiTejAp})?XV)Ud|kF3{{Iw>Gz>`q*6f}ct6W` zG$V|jlFnxZz8h<0{AK@S@fxEe)OW;@EK&=UXaO2T3cnA2#6&`#3ZD;+{6XYNY zVHL^n-TB+G`a6WCBqh9bu>9`z;C~*63c&xv$29YM_6kuS|NFG4;ItrijYR*i_jcwQ zig=)O7&J8Bnrr{G1{D~ZrfclXk5>L+v;CbAY&JuA31`>;`QB#{lKrD2Rwkz0dcw$&v(^dRcmWPz3j>Vo8eE&J1QW%T2E|Tc-srtNOOzt7x!zjs$WRHc&yR<8uzc6&X-u_bW-Ty0lPx=Ydmqd z(;D#`l5~1%F%H*r-QIT;Wb|Gsn%2vyI^Yr=wM_EPos3^Goo+|EC3~qR7p*jG>|_yr z(gsQ7pWgc{L1txb{YCZO?Lb4#m78wV6MEAuif~?NOzU;;{l!PO@;nr-hardW9OuW& zpG|#57ZB<0nv9WEm8Ver2e?Yy)#QJ4@4#fX3?ei!~(rKzB7+FF)z8qfp z!%m5xKQ3du4bCcKm#9YE)2n5T9o=DDVQXau{glP}XMZgyGVB0!0)O740_Z~EZk7`I zYnzJd>Xp|Bj7Qd`va*`6Blq2uFRSMC=x62+1oY-=(lwFI^?(bUDiX+oA3u5oAP9I6 zd9`RU)>8qxtG`!C4r-DVa`n$(W~8L_>l2EVhzbdj@o5L2z62E^8xcU&^6;p4Zgrx1 z9urKrwayXr((EIHu)F*+A78fUa`w(Q*L# z!otEp$Olzq3pgWT6c#f}6y%l7nu|!+dV1%|Mt?{xhf3})ddak+8i8JCOcTI42?<$WSl~yytIW;Kt;_UOIZGmz2yD~92@H);9Ar40>DQ`N zg7TC}hy+72nMaKTqc?_85YUA-t39z$za@}QzJ>ZNfgYC@^cpAN`~W9b0u0m@+1bAN 
zz;aDZLqh@qo@3zg>Zk^a1u#|s6X@&d(WzE4(9;L!2Rx?j>g^?k0BgCGrDaF8PG=Y( ztxB#Bhou3HrgL?+mdwn|+*u9sz|WtNO0M_2=|QPdN3GoCX>va~c@eH$R^~VC{+Yv}n!*yI-oY?4`>jf<->8E^9CI%Y_kQsyQy)T)!0WfA?zI++U zQzR!N3$p>^11#{&dOZlp$XB2iy|lE{Sq;=wfYcAM0Z|f+u#Q@TtBZ*VjWWB*_%0Ab zmzYgg?g6E%&J6IHNk||m1F{GYA7645omkkSN|i0Xcf~SRVihojkLoC4D?Q9oisk+r zYMt?44}a!R8}ku=l%M?KLmH^cQrR7dlaP>rr7XCw1<-8(BM!yJ04wU=;ww%}K-fzI zGyT+SK`0%zj7=&1D$+9WV^og< z=g}(iyr@aX0X$GzLnYHlPxpbZ5$Kl<4Gf+DZT95kC>3yYz86?IfCsai6Hs)`@C@eA z-3<8tA9wn&ur{KutQm&b>!1^MqjFszWa4J;v<$ulT3V3BO86lk#CU@J0RXI>n3^h5 zt+L!-2A1khuzn`9nX2&dH3F7ntl(XiGZ8R!KZtxLhlYWnWooL{3~wR9F9+B6YJ6R@ zEUC<{A2RrGCjVNC`HKmp*iHS|Qis?pWViMD)x)8;Zp@>tUq>Qb$kV3v`>hyM8$$y^ zIxBdSIg1;G7kqBlS=>0>QxH%9+6M4?8VnGqo4WxHdfAdL+6izxUIPFO10_1vvuAG0 z9q3)(CKO?Te^Y)Ne%$0ZTQ=Qj90nR_rW+Mu#%#Oxe1CYQyhcrK5cjJp9gq!BEo8o` zmM5rg@(4uJibI||V_{_7oS30ZC@GEeS=cf1rkJpjk>~r9H@b!QKiWY0;>(AatmV zOSAf+=+X4vWTyJ4V$KyZRj~t>Gox_~=xHGEKv@a$(f>RXt=V%94u^>nT_7$7`4?~m zxb7{sqVitK_@KstTlfVqFnc|~luG6)Fo2GhOaU$hCFL1Fe@3(L|BUa3ws4?5K!UP4 zT7VnSiim;&651_bSaE^JdtosTxK8L)TIU1XT0l=SFgB)9=V;SA^)F1wZN^6xnZ)Z> z4~YJ6`u*FWtKPgB0s;ehnzyK^~P=FB!6S?mdf(#U~rXkL_ zi;!@F{bOz|{Tsl*4>+Gso5{;YHi@3qKy86UO`Z1ulk9TG?dD*27qCYUu&OvDx8nF~u+Hx&>@80XD4I-Q9(5Ke9OjsoAMFo+>v5v|UAo z9Y}HOPn8^}VIDZ7c!-dwl<4>|DYD2A&cjE%rg>apI`ua8`oTZv9%LRKxM6@1Y8v>$ zfwUDk;(<6b7xcOqPZS$Fp8$cuJn#eeU?IzcJaFX!Dr(W0)+|@# zEafuBlU{w0?-QuaCV}uxS_AvfJR-jUcf8sL0#-dvu(LvikWCfrzHRUC&JiLD55!>= z>nyzU&(1Mkfe#B02SvAPur~%LdF~)`$PbHGa9F_2Eec2_^sJ{ zU?u~rE*m3bj#ZlD42)kIHBL5~1ss&yTHM+ySZ@1gT993fYY#S(05<}F59-N6Wi_xE zUB3^&Ku1q?Uc39xqz1U(0g5ZqX!t*@y=7RIYtsh$Ac%+{A<{~BcStEAsC1VI(k+5? 
zDh*PCi|#Jz78L1j1f-FW?q(0m_ub#OHS%=D6i++xl_hG@x^8SU5$@|Dis$!eu zsUhdxT#1$~jO*yuV*nG6fWT(_%N!he6&V9$dt~Z7;eKef!>I$7o}66lWaF7jNX<&4tLYF6x3o6) zbc!-{mc-#`=tgMRbW7dS%ku{3pa*kzJ4*1=qupcNu7fAvuRhK8%lGaH7aMo(iauf8w(&38%pXP1^# zwY5obaU=ivhrUQL2eVvSQnCeVq$J$#P+-Ad`BM0{gWr#i43neh*FQH#>S9w*M-GJT$du0-hnPJTT?Yu7EC^= z_4VffJ|@QJ!9)ajKsS>*mxDDxW|^iDPf~@TRT0Y{Y1$~rty37B1A^ZBetPNxb{eBu ztw`qtAf>SDVG4cc9cZS|g>|$s=}(R(3Q6nwZSbu+td7tIMu+tXA{c16K-(H9p-W)reoL=zPPTH%m&@fG(h48P8HgLZP~nZ0#U=n zgw{(!Y-}jKJJy$##bkrD-l9Knts^5N=izeMd*5egUwJrz95pYG@#Qg)wBRM+oj|&P z?gyAFFDGZ#7w`uw8{4wKk&#jTc6Tfn_!{8znVFa#LBa`0VNqrVhB;rr;S2siKSCAa z31GCm{QSOTQ9T`c%oc(dpD*M>YlRUA84N+ zk#GDidbsDll2%gogg>fpO5{9Sq=bnOA3r8OzV7X^-0Rm1;6p&G0Odk}lAt?P7k!2T zy}jVC(NVQ5H573pqa|p-cUpOdo2W<+TUuGU_7ENyr=f%#PmH^0BeEs7b%5D}L#ghSGGIp1MM!K2G+Yh?_r zLS$!U7V+)XkG8gjOFSZ?pD-1?@hrN*mb{N>X{BnN(QiEthn*uGjx9L=Du`Evl_q@; zk&)kng9=LG<>XYl5nMP2$c!03pM?eR(&*UO^Mv}MqN234Ck50nk*cbyxZ0l(JUkVl zBz%>bk^$t~a^H&Y_Ds~$Nz2P!y1WmIk2#0-lAX2UQKknnkLap5n26pA{=OXjCdlIO zXo`=VD!BR#RRQl{cXy)ZZT7Iypq{}D0t+{AadsAg*xua*i2TmM;r_+VR@)h|VAy;C zb+>UgNlb0TY-ZK{ciVoL^@cFT*KZN9|D@@AS1257#naO)br;d(?kGyn&YX*TbLvw= z@CEL$pImvavmI@CTi_$J1Bux0Cg_HSI7hKQ;J_J~nB)`q71!`RFHO*GA@C0T_z?t~ zf8g{fs;sz{!a!nuVseU#AV-g9afiaI53gKQ>x>Aa9m{|4U@QG8ZX27!$pU%wk4$3O zamQa>ap(v{_c5Z6#y1{SZlxLhj^||4+|=}@b9}O#^~yjAYs|CiOY&hm9)I&`T2;72~=N z1oxgf^7l7(cJsm_RQL*i=bz6zZF;12AlDPiO2BLpwE~Gq(=rY7Q*`tLep|1j>O|&a zhYF;J+{7&O^rqi@Zo-qWcfUTVwh`lpv{O%eJ2v0_DZc(`uW1Czt!t-caL=j<+Ke_g zr)yVPbCw=Iw?K9SnHrd_t<6o3wPA)SGzAjuUMT*ek-k&meTce`O3p8K{Q@^Dyn%^< z@-7ExyHHwooDaY0heZZ8!h1kj6>5~EowR?L@WSRNm4n)G2*Fxp(f;ob0EAwsS(XJR z1RmgOiYx@O-K6kvAYRTOis)ap|CXG>OO8Su3bvS*j_!~};;$D&d_yuv<}7U=Lp6B) zIw~p(4iBqxaAPQMQ#z0a;8DuHe1X0`1m|}-hf!?b@CeDTyx=)OgF^SmqIgDzWcG2Y!gUKsGqcsjMSBQ3ftW|m!Nd-ajGUjF z8))>r0#z*(##YzV1gaYUJ0RZw4rmDVdvmh@X)jNX>vfGf@^W*pn+!l@z18a-zEM=CoSdAg>Nn|dEI?2Ogq@Y~(efB7*#6J)@nBN5 z0ee9GpOh5z6?*l5SvDsC79b%2$zWAz%ZxTmIAw(2-3L(0c-=Mv6q1#p1Iaoh*9DIy 
zC2yppP-nP?#>NQ#E?Yl|*DRs}NfTNrLEKjcQ3`L_@uUT8op&41z~c?LU_j0g-)F0$Oc%)s2fWM^l0 zx;%&G3&cWBwlXq->&%x46Jn~W*RdP4>7|uR7s4MtsjQ{x25ULYB zBjd{&J0qxDfQf}@BqpLIS%KtLi3XJ0*FlmQ+@}ijlk0VE_`A-m5MVMeF~vV}w2rbN z?2J3i!rX?yy$e$2PeX}mzL7Xf&e08{LR&BcQ`41&1uB+*N6=J-_4s;i@vfWn)jC`U zN_I6qGwhZmLmRRrvs_SP3cvwYDmy#d)z$U+k*++5KEWs|b$^kB`bH2-26(ui>@ETO ziH#CrzyqE4pu8tDI-dToS^PUXO2AMcAsvD;j5xREFPNrP6NoU_CUn%)LeRXS^AiKe z43x@`mCcZ^w+&!TA97r`90f2u*DmMZwaSpRvf2R3!>O1A_uNl^vOfu|9F(*Gc>zK} z$o4|se6aL*Z=IyQxA!g@TD9lZS9@I~#1Uq>XbRuIr*qKYT8iP1wJf}O6U-*XIf);% zbb;*x?J(jR8r+{e@lj#Eq@HMk@D2t9#lA)yMJc@hr>&Isg|h-04N+0iuaaOoZzz7% zuQu+91Mmo?GjKL5r|0`pFf+2UK(H&w!=nmT42t#C2Fmh`poZ=R|m$e?x;m@A38YUTv|9(zE_?f6+uqSGT(SHgJ@o zoYH0Q(@$+SnwUFBs4ycgz+drNO~9Md{{1QmE3c=ZA0rcXf%HEeph&RmBcr3wUhUj6 zM`p&i09clqnp*nR_PJ=~`+L`D;bE zgHp&0T#VW-#8a@*XtyZ*zzSjOg0)B=0I3;(NvZ517G~xcR$a!T7P1^n$VDnacj`tW z$#?%6!3J-=2k$x)Z#mQ6TjtM4MtGfi(Ue(sT{aMn?WePtddxC^Ms7)e|0E{1(oGh* z!h6&!E$8!Rh;r)UbXKliTtFr!5JB4kK|xB2iX41=VF3X^7;0b~lHXKU7Bej+kMU{)p(!g##BQEfLW>g}(jBTQ(Gx9|&OsOg8=p<5 z`kMIz@QH$$0#VJo)Y8&jpt+2!B-uZG{=A$hnGwGSn3C6Ky8G?TIPJwmhRK4Dc<8>% zJLR#GoU!LiT?9A_pvgh-or9y`z_54-ty2KT{?>PP7Vsg5m`nnD5fDtZzM&!Y=PoPU z>3U}_28NEQ@Yz-S|7roIuBk#ONo#aHvWB?^sknUh5QucTjmwgp8MgtrmpS-(SpLlC0{sT z1QTPIgpy?=jFxuLtf}Q zCTNCp3N$)co`b*{B73%QPT7T6EIXBnnD9S2*4Qr9HIOUjgs|z7>k)`Q1;s-+!ZLV{2nC zsIO00e!DFkR#3pq8~^K^Uv!bII44I8`vckTM{ZF$JP=9;1qDGbP_WK4jEq4~QqdkR z+tt7uqWNr#PvgwB36S0u#j_KF@;3-A^74Y3=eEV6;1}y=*gaoM-gvV4hgZ8x&%v-l zCvQ2qyThm+?EkNE5luj^jK<{qL5t_;PffFcmk0kBZp@*bmXWd86VC^+kX$z8-k$OG z4GtLo9-yybXqaKo1zPccpxgksNocAH z3iLfR_(fQ|L7rz45XFA{C?_T+2Hg6j#$v>$Pb%PEWAI0Yhk321#$)g)e8Byv`x{&R zx|cym=h^VBP4>BmvWcP5uT<68@9xNvbASH)0VG$b%|{t*JLAP1{%Xd{7Oo|LD-4o_ zU1MTne*$R&G&FRXN0Tz`az}=&B4kuj{QOWi9nlSM{YB7C#dnv&0*4Za)Onp}*j|-B zPukFO<(!1w7D|fnI@|P6c%`9~x$Sso7Act_+zXBe(0bQk`3duNB;<|KhZpb!=ub$K z0;hkpv+xv@M0$ENYis7y@>Y8<9z!&2XSWO5`79tG7D@a#Z-^*gyQ3yHtjs?%Ps2ir zG3`&pRcoTk^rlnXvl|L>5s0P`tZkin&ng8s8-zJB?iY^>7f6{xq5{1H4Nw!ggOyk~ 
zu=f5RK0wNXl8&wo*zUGAf4S@`Z{8;3vR_kEfNX!b`gpC7_?04G1Hj`%pZ`QRx>Z>j z_w6crPWSsS504`{r|PC^WapJO-Zd_4S~EP&!3=l~DIpj#kf^@efc=@nBY4RJ0s}iB z+fr6mc5!|V_In<2Zw-~jZzfBzoh5HKa+no598 zfI*Kg>p+#wVtWL{dGt(7cOLprAS6cr185}Z$jO_&e>VV$Q<}#0RGGfELZAsLU$@L? zYH9*>4y+XyS6PL{*zQXoYND%)qvV7H9YE82ydVDeb3Vvr10fgHIFc{DUE}M4j_FzwjrXt@pp6oi-)nFK&$BV^w-YLCwiV2G(~?Or$&PP zV{EJnz%?|+0tOLs4j5=?(5)WGFz9!-3EQHytjw40?;iRaD+WM4fiuVjpxZN8Nlnez zm3UMHY|B>pBFya+1$sR6mS521qD4|!xvaEw@Y^?iAXTBp9!5I}{XqgHi1}?(uF16Y z^ok0Hdlg_X;0XRbFiqFlLl^{vy3`TUAbNn?MZg~2?}FIb@Eg_us-=|`jkMGD>|=zO zon2{as=kYh;1)`w#KoH{ zVYhjc-Kkgp7o+>w)=WX30({;h0fCz8YG^a3p{?E4+8VTkj)3(G(R)Ht4YhMxTEvK1 z;EACXyByGdc7-!=PGY%CpzWNAp`oFX5sPl^CrCI?l*>RS$Mfp43Q!b;1S&5*m?{5P zwC%LGybOAbc_4H_?gu)RI8e{PF#}_=vPEcWaE;4mKlh{3<8?yy9P}t?1xpI;<{(zS z9XJI|%%O}tsjkD26D&87A;6d9Lh!k|wkGJ&{NqQ4>IgRaHPvt(v-bB^AmK>@?jgg& z+Xo8?^dVpz*n~Tt;J8S+j6)0|SRlXSp=IUOwFC+zh1_emR$k5x@j8_CgE;2`sFk!y zB?SdSZc}XEC##T2t*>_jd}C?34)_ijt?x^AQ2D>IxVX5oG7ciS)n3hOTsoV01MTjH z8p-kQQgCu|a!JV!(C@J4H53$Vp;qv*dP?-)?>^?biRQBiWXSK|z&qMN;qO%!D6p)3M!HL(M7-3|V2X909Q za6}4oa|H~31VF3~HH8^PMIV!GHzq0!8pva@+_L^dS3tzs#13j{Mtb^{jwm`nf?wY( zNCA&)sDHf*DLv&deZZoRTfT#M5=1+y8XBhh`XxY~gZTwy0Te7$$q+pbhspX0Os|Cn z7a_)Fi?yt54=9M>g$eNSc|pAd#5KOCHw)S(*G;gd@Bz?*K)_tLz^AnpHVO|7P0Q4j z8+1YDCzv~z%qFRl;eeFUQX_=PvL zxf_Q%$&U~2AV50?Ob%FM0l4lZC=EcwXn+4df)05QD1)X5KJ;fMb6@{M=#}5HnuQ*S zuxy~Jc;nz83GCvEE~C#*q5vDx?)q~B0h6@=kPrYO>>)@SQBqQJaYeNlT%VXkFHcXou4451x^7v1DFAo37I@_-Ow^{4;%o5y+AhtLi`O(mFML-6wAVH zc_898{_9u6moKnKpr7_z`2F6V9v(iv#y1Oo@865V^;MafOTcKt-pkF&0bvaS@v7`6 z92SrxBD?^?U&eX>vvWSUi}mdMD^g&@u{$z2N?vFLgV3PK|A+^|>SPjFE1 zC%hY&@snO}2=U<_1DJ*T*H&g*+Z-7fBOuKe8w6%S{lLr13s=6Fa;YQE1t-J+FhNGm zj9UvJH9Q0iv!s025NfrKwUH4`aLXDDAIztsVnk%*AWS|$B2c6i78iShop*IT19ct< zJI2Syp;q*Z29!pWL!b|*1`HTbjG;h#0Ai`E#Ftb=ARuaE58-15p8-e0In>O=1j^W; zY7+?oUBsYUAZ$|v0!lbpKToEL-$FD&+^C4RsWfZ>FQ9A4?<>Dn4}7j=a?WB51&egMY>Tpq|2;U1P< z8Wh+>(8Z$TPd}~#$#IF8P0EFrTK!#7N0t)8p+5_PZ_Pc0DT04)Wngl1bL)I{6Mznt 
z1qGI{1K@G>W4EDlx$-xOAMSR(DKgkYQQ#>dM*|(yP%(*r9HFvfP(?^XSu2!2AP~TR z0OnZ!{TmolsI~>sArqKzaO)6CLFF(^2ZS{+wczOBltN>`8wjYUfa2jD(oeuW0fyXt zXEy%x=fLVdP^`e&xHvhXprC9sLtwi1oW`jVbVwpsTVN!IR`(&TcLM?4biRqQI&EC% z-7aT@vWI47uE1Y`4SgtJcP;IPJT1!m@Wh*-^Z5EuD zkr5td=I;LfFF=UZn~Yp157tJm=jziZJm_Kp|2H5getv!+Cx%S9Uvja&tp7(U-OR$m z#K_3~c-uSK)7F;#Y6})(?+V}24Ynqr0C0z(ixP!=;FS3@+xfW$3PMCbaP|QKXm6sf zo=k{YFhdd)ZF6Pu7wY`#Ey&ja+JZ$^VN~rGVCUn*x_x_fs-_Gj5wbdD7^r4g?8$}P zsSbL()#%Oa1#2f0(?O_vn;dBAfKY&Qt$p{1d`<%9e^RAyDnH5j_U`0 zhjVaoalwHBglH-hPy$iSfw5r452u;mPaiNVXwBeWIJiCwE@c2HaDA%?2L|*!fPE{W zSS}fifeH{rqTop-E}kUZa2WP)a1F2&;+4t4w9wEVAlfGe=Ru85Nkyd{qNbxm3XRRq zkKe%5gLRDPFob(%Qd8$(S%~2@oQz!vQ+_>@oq}8xC8j0G7xt=}b6Y(Lh zkFT&o4Q|ORDEI`ei_BJGR|5}y6LHPW!Utw}Bp;d$b+v_(&@I{l_fWpW6^iB)aQ2W8 z-PcEd99SyA_Lsn}L)0di2x=S)(4GZ+-zJI4=W!;3FEEstq;nDH& zYM64M-w{x>fPa`#y3_j_EroioW10Dx!BOoj&i3`Xk z3y5A+hYCJk5t}#AO!o6+3_i_2p&mo1vKl^)6>wH6Z|*$2FP8&58tTU` z-`HPV>&E5TKxhEB|24y%J%UAA6^#7+poPiL&4qs6P)EA0B|=NvIzIl-Rn@DoFB;5v zl0aN&uh-G0z7l~UrDZQ2M4;dYKAA?t!`)57lLiWqBP%HAnUz&x0C1d%rKOyT?If;% zBcIG~i-XJq)=$k3)&#N-_JX?mD!@DfBB%6wL8}aak{#||1hfgXtc_iLi!JyZSO|co zX+N@uRzcg-W8BrzkxD<&>Pkf(di>atp4{qLOW=UrgDgKDx3grTibbp%k4K! zKMfPM4`*&;QPdN3-^P+O>-b$bac)ESj0R#<+z*8^OeK=If%|8gmYnZ_tQe89pv)lZ z$FU%whuzQBmA8hNZP3QZ&&i{87#^jZxZeOo4Zz-DqZ^OIfud>zS_A>o0EmYtUtOI8 z;sd5EyQt`;f&w-X5wsO&wFrj03Tq&}4Z1y8tNrac@mP`l7Uaj+mxm7D8Y!^RB!aHb zzF1l1=Iy(5bX?!iZXytTEGM#w5y1|(7~*pB@|09n0bc7sjO^iY2{zbp;~I0Y6NVfg zJ2^NU^!F=Ff7UWIkLWQ5&>`ZyD*Mu=VBW22$m0H=NCeKB|Lv-4bhnj@E*b;R`h*FOX? zT1*!!PaczFqb+75T#z?ze@KdoRBIpM{WB!4U2l^JH@TuMI$qN#*Uu<-oPDqVpZ@}? 
z$6JgXTiW&3CWHShW$SX7QG#gX{W*dncAE{2^Uw6p#|(7esc1Nv^D@Fgy+_>fwq|=OGfZUPn;f*D^T@Crzuu@8tK)5HfEF#a*eO@uhkjEWPx)_^Y{{ zpc)mO9O+P>m}2Um;k}=@W7-*lqhGg6)U(Zwh&iT5IwVw!_C9bWQ`?x9xL}^RIk*r6 zP37EM!kxS~VqkU5jrq+;*Sg=ER3Z!C!k|hpr)yOs%4IOidfP7~Ae36`9Smdqt?+_g_C9+Y#^KHjV48izK$)jZBl2N+?bjRNyW@n;EAt zUfWIlC1+e|tu#*{`Hbr!r3l0X+~VDRWL=D&#+wU?)~$AixTXgar!6*@y7{4 z8!_(S1iY>3pa#BHcZQ2+0u7G`-JIrqFlOhcJ#5$=7SD4otE-;=Zcc- z`oya`8-l8-$bAB~8Fgu=30zlBm1*sM+1$2Wy2zwh$?|dAU*gKv+o^6ZE6k~~%sR0I z80TsN7(e0<&JH0O}?ZuJ9~ukIQP=J-YwN7pCT- zF_8qTlrcQIF&3t@eBrnST1zK5q{`-P zg^NRn==o8ANEWSdqR@@91FIJ_4c=_O4Rc&KmmQE2zk2>r2%I@(iIepG#+Bu8u{9g{ z;4@Y7lu-Hja?<$<5o>LvsuFcD{pVmMy!(xwsL@X-N_NZcWF8?I{QG)9eGj5cFNdk_ zhzrbH=gaq?APrSgt+9}<2XuJst+ z0Hv0j)ycWVFULepYniC-@M`@0p}}2#->qjavWky%MMqXH1{kOaJ`g-#)C@Aj$z?2l zH8!Qm#g-ss*D{K!{3v@P`M8m107C{N=#hPR!rMvt`xAs$3 zxr96vm`i@1S=lxE@k+e3gep!_Rl6=}-9){}@GacWed0#7PK0cgQye|Yb0hpW>_rr} zEO;%>P$^`>Q}>wH^gcJB=hJ0v!JbW_rN z9{n`>jS@HaM;Li=XTD;Z@@Q!FCKIk7s&KNt1QM=ZYVd0uM5g|q-j@@z{2d)$HxU1D zzeq116ZL1cO!_javi|n7DZU~~i+L`U_-3;05CZYyLEd4DT~`Fjw79X1Z4iDwRSkoQ zoRD(7lY1*!VRbuhNn|oxFHr@jReQvTY5{jMvLdV4vVNn;!Ql4$zvo?1(lj#;clWiO ztOlPVJxZ<>kAu&tRiIbjMY>}+|L>_`^c^=PpZCCY8~27 z5-vMq*{jV23eHWd+UaSY3r6y+s}+#u@2tu!-U|Goj>Bv>#D*fo%u*{bGa;s@*Z3!- zE3^Bb3gJgrTidrJr0|gtovppfS=%}=$2Cz6R_X=pRbpxmD0rbCi4F5&3oq1jlMG&s zjfF>lzR={9(GK}&ctzlG;cil;*{tR0&J-zm+T4X@{?t;avPtgg$2@%jqnS;!2lK2i zUD=vHQd@)xkj4{!HA{%~SVm&{o>nyx-Y$6RvW>BOhB(6=#EYwNkPlBvI6s(=I68UF zwCaATF?#acvP8se8Kugd=Cxeg)Qq4;wyLr?b?^a}+Ws0RFZtD^s-`&Rdr2h~wHRp{ zoNttxCjIr87V69iK$pY)62@13I)M{E&06(}8sdc?(y;_1hDE2i-P%h725lS)x;ieN z405egN2Jq4w%mot9r@?hbV=i7A0sm(oyDrT%xH2NmfE7Sp-RkX_|?xqwsmP!9LWa1osIHUI6*!TR?`HEFYICR}P z+=+M~IPUVR%jT84bFF(oj8AA7x8pJH@4k+D0zzA!l7UMbx$hGX3#av&gCu(`$Ven7 z$cnT--rTZm*#ftj)MmD_1-7+n=TA*z^tI-XZlAV8k$*J!jTI1%+HMt( z3M-RGN>Xoq2=?e^RI&9PYMMr0(mtAawjbpwRdXK-mut#S+E=YOr>}q&y+f-c4VE)tk5Z-EYrA! 
z9YcJ#m}Rq2igEz{Demi#=-8g&+#S%|wjnj@qfv;BCW~61^)X##;K$O$+Dt=B6y^fi zN0qf2q81Mt4Kv(wQdVL$<14>vq>V&qCL~3+C~O#yDC^Bvkq4V8#RV8fs2*PS)Zt1r z|L9DMva{fERb-|dsbR3Wqo_OdGE1<~1&4Ey4&{F^-@|_)pV-7B01*KB&@vjXb3=d! z|3Cid+?c$EuaZrBH#GCP@aN*`MKXK7vf8DL?kT=iBYFg4xaHx_KDvyRyIHk!Rh1mc zD)+0E^;wKGn>EUss(0sQY^8YjTQ-k4FR4>#B5t`gi{iLH|NPLs>FsM%V-ma!nJ#vN zLsk(yy0UfO-HO3TcL`1A<4l9hgkQvXec=s~?KXD+Jv0(YznN)WAiR`IE!pdQzNMZmj zi4raWG2Ue!uEl_LI_D)`iXNpJYfyL@dm8q6uXK&axr$0rLDxWm(b~fYKhLXGw30jU zi{~0uTQ3H$}{9qm)JwCvaPKQs|HVd8!-==6kdect(^Tg zr2|j)%HGyMHerADQuFUUJpT-52Fc2PZiCq;~|B2dwLL2DhxEEWF=r;!qYaQR{I=t!5~oNj+G zl1gzn#Q*Z`#-u7E-&InvYh$mu9p1^u->mW0%Tx%2mq*~V@W`k_v{b~_`0L}+{(YC1 znsL!7W`zTNsdattJNBgV(7RqbG2xI<(#O!VAdg@D@{k}FC}LBUem%Psw)pb6D|O~a zbFL)FX1Z5YC2T!+3j@ETnuk%g3;iS ztYOyYNoa1SNztw?>33wD?YB)(#5H=m%f_)*!KHR*4WPy7$m%HAv zzg0phEN;mC>cKugb+a0_Q?B`ltezMa7*eb>ATBVoe-PP(O*)tmBKXX(q3lM zhzfFl3ozaudmy#IiK~Lr-1+`#zv(V&?@NjS88(|O!p!Gu{K9Fd&FrHVGak=68-73Z z{It)w_Cqfw$Q;c3yxsIVX-bwgm*}j=_dmxZkJ=nm*9EuaZhG7%RwwBYoqxflv3+du ze8IS{SCk#w^vWUmOEmTkag-L^Oe6M?`1ui4kM9*)KCMmc^XDYERfo!IWG;)spX2ZE zH-7Booo4$JXUxVwvCSCjR>)l@%M_}wG2*N)8GZZns?tAF;fJ^WJ*Nc(`LZ2$i3O;F zyS86AhEx%Dx~*iclU@*E#5pas|IE(RlV2%VW|Cbz+i0IPNT||2YuC1Z_WRZNQJJ0W zxQgt_D0eDDLQC?ucycakuN7#f7W(%I`*vYogJ{ERpu71F8* z#>6}&29yD*0AU6l>wXqxTzH?PA#K~93995~pt~S#y^qw0e#7FfGz^wmmu9AS5{I6}FQ zssH8?oue`;0#RDIu}FmO$hxJGxO+L2?U-Av{1%%Duf=w6?srB*{vXaCxszi(sD@T_ zPoLZOO=8?V{(2=QogupYwnw(hkdXiIPxo`NQKH%YM3-VkJYR}3lgqi^uV)y(EH~+= z!)10(AzQiVUbD0g`u8bvi5>Zl<6Lh!sN%*qqna9NwVFhKeWzwuzyGY4giR2c+I1$5 zZR*I|T8%)uexaaL(Ad8mYq+qQ1{=ZB#gwD^C#$&o42#dz3;#z||Fcg&`TfGzKdDR^ zypc2|+V4uNTSfjIa$4*A#rZYD%Z;{^Y^C!^lcat}uMP9o7o^J)3oAU^GwgHAXn1Wg zj`f0DLA$&Bi1ynIyeK~ZZYv6Eo_v|Bf(MLP2n~*bL8IZyP>)_@IazLUWG{TZv&oJH zrNonz$Gi=weTnObgOh(A=u26CxRb-}t@$Kf2@ym-{}lH_FuB*0B|35jdhW^BIUH-- zU7tAApHOXzga|fl4$d`rq6*b<%t({_^#_GB$nrinK-gBJr6|SsI`PIM51+aeB)+~B zzA2OW)G^QR;mg}EzP5b_QK5|7%gsNrcSK#0#3c$d<_4z}KHO6*kuZ(TJFAL14Z$3v zwZzaLf%mc&N4|&TrLC;kgDN(D*_WI>s8{y+b9M@IuYA=E&Mp(80jYf}H0W1tIC9{- 
zz29PApzCV^nPgw1a7XT!5W!vox+_#C8mK(7efTX zRNDCrhs%ER*?m5x1)VyQE&`(r^)%%Db_8Pd?8rG(_f|P2!@5D<<*@UlRL>0R+6>v7 zvZGqtY}@J5EhC9QCLgStbVUN5)3zf?H1;e6A{@&k#;S*C=~r2>ov!{Z|5Ywm1q1@Y zBI_DkY?`JSZGSTxW$)^Q)losv+$=JmX*J->`qBBXCrlTLiS{(rv8e`*ucr*mywRuZ zZdYO~%xx@Zv9@9QZiHv8+*_0fX>zaQ+%)M~1xb#{UFO|)fuj5w$eWP9uwS{+!6+=V z`^@tJ-hL3HL_;vItn>&4_KjJto8dDLf(F*p&sv#&SZlk{hhpY%qX+87#6N$Eq!erS z?q>|ieZr^~6fbHzHvf^q)zHPUkFQN?>7SmmU_L9Dr3~rL&*y!aTbU^MGlB6P^<(bV zZqhLd^X1(k2Kx`uo;GFjPHz?--8WPk4SkT*WvTy^1Qr9+# zp^2S(DP0<~w3MTv-{-q%3QJ5u4_#;Sn7<&&OZ{=+K5@bSf%6@znxwTl$ER?^h#yKyIQisNY!4NtjIHiyzkvWh+wwIH5QOZ&*d&FWW1QAPs9)U+!K|&h_!y&Z}dy^ET|c08oD957^~4fxLsT)5k!raPtCQYT==fG zb%wfVEZ&}0#JVW#`37KtkHrr7p)zwd>Ef%@9#7Sq=0b%{q=+LjGEsaM`eUPVTM9`6 zHwEjqw-|L+vuUIFM~<)Gwu~<1JOXSrZr?gu>b^aN&_xQ!5VJx+Xh0r*2x4J~7Eo@O zZ2{r)pC4U+%m42`!gfZU3M#0wv2{Aj{RBMwflM1g_q0nknpJbD$9r8o&t+$kX?3Ll zPYOhCw@gd^Z>R+t+RIn8mRw6*Ao;!J)E=%i*OD$Gx;ePH)a5g6t4*f&r3?GC&Y!N5 zdBPpb7<;W*b8vg!JIXG8h+J<_al3rbw+ z#}oN&%kLgE?$oMKV?A|p^JdNOayi>=9ndvND_e`dXifiG^m2WEcQ8d2w<3-jH8YU? 
z%s_Rutvf`mUyX%r(-i-iyPWxaA>t|uOO*YCr-Q6x1zRsIVYkQ&SiF1SRV!XGDrhZK|k(f%J z<0XF2^PUT*@h!KTej~c}$|<23%+XqyKR!gQ4ShELG8>WOfGgn9%!7p)H*`sgvh`H$ z`>1RxMhW@^edu^0ZQx_G!O{ydVQfRm_{#d~bWQs1y1jkVN{?6_iZ>qn)wnmmyGW;3 zNDz*z)8=<#3afpn!c@#B+Na)o%hjfH6-X^kt$I|d2n1dyRzoM1 zBE5mt2DY%so0jp|Z}IXOc0V{KxkH`)9VTVu1(^`Gp*Ts^^)UHW?CuEfrnf~0V2`99 z6drJ4uEU^|I2f$>iEKDY78jS05MJi9E4&#@Z>I3u@(`ld<4TfYL5tM1OowG7%1CGz z?)w$b)i+h{VhRcACSxF!a7pTq+!*7P)V*Ur5*igsrW5T~8npQRa*b_X<#KGZ%%3Pf zOT|$nGk=l2t}&b(i$mnG+9^DGRk<&>{pdu^;;>}4#yDf}_$}R*`|&wPgNn^M@z%-V zgPrUfffAJbOAF^di#j^|OyQ5V)@uZ^oMcoE_R-9rj}8yrM7TBlY3it5T5xC1ldxHj z5w}QirV-bsXV2$f-5lw{*_n8Lbm+NSwmkYYa`m{v;|gP-nQ>=LKDQ@;pLn;lAdkE_ zb+S78C6^`l@riyx0g3j-4TEPqmL`^mu~wPTpdsb7$M+Xf35NN_vVpI6(&QCU-c#C; znSw495}Dbm+~MUqlTuRqFT%v$%=pAlArR%sWa2}L1XGZ;`d^Or`(sZ zX0xAmC3(p2Rkj_6CSBatF4T;5V2q=EIGFJuF#n|C#@0iBy;{3EyTw3R)Vax{nQm&$ ztA@AB+!w=T+Kw+@6mcb$S~X@j2wEFE9X=lMOl>Qw&&?pfi`>As(G@7a=gGK$iQc1Y%~ z(yc$r#NdHntvJbD^We1JxsjJARnBH2>GF#3Yt6*2wE1w~guVU&T@Hbwu$CoD=6tqy zo3-1k*%#0HPLS=t@=2Fz3VeuTw5Lrqx@{7sD^sY-wI=L#p7FAPAIBn~>U1(>o$5jaxZZw-zrslr65qQl1IG~VrT5i}Ft*0ziu_9yKoS5U29{mRQd zb%x~Bs6vx?a42$6T51%Xv6JXyGpw7G@uI?ByQY(le}CVc&LM9#k00MpUnBUY)HBgm z|L*lCYE{Yg_8W716m?m#OgEw*pAG(WIxxvvCmJ+)C&A&P?_h)Vv48DV6;BbA16j8o zdZ>)NWW7^`k|0|4>(>jU9IQtP9WCqyl$LQttr^c%30q}mte+2^{jq)@ek@yy(M)XJ zJ9v7kH#D=xTFcOR<^0hb!eYC(as*;TF6&+D6cGmog;B06pN&h?#E2;h{q%m*29k(} zXW&$v$|hanFJOpt;IVlr*jl zUi{rmyK`?D1*Z9!HhHfhADDO5Y7=?%5tqR{)k%5(#2I#pDDz#cX-vua0p^MG<*!)p zDhSj;vN%kEjYkc+L06VurBI#2o ziI?fNzr5=5F9tEj)p3H6&+)MXE$#49?^a1S;WACo!=x)7rZ95K2wJZ*9@!Y14 zZjHBV*|xk2iu+VGY4vl7=MA*wkG8W51G2~P8<6al_3x+`Y8~_q#`e-)G&oNq{PZz# zhDI7k0}jrUH|>N+w*{jP3_^)yT}OLRAyb;M(vHnILy}V`DC5gZS$(c8I^aik#p_i= zc7$2-&s$zzk?Rkyp*0_4Vb~Fg+|Z4k`Jg4tfV(ApVsFPU0wH1Aik6u2ovFg~=I^gF zKa=xBILdD{vtzu@bUMn6e4!>{!jmr=Nx)xl^Di8&8`t7aiG_C_l-rTcrYjD$?wPpr4fC3kD#H!R6CXC^yk5{>F~c zyh2fgUXJrP^GEFMiDlz;;*=$qtb{Qf27fxtIJZR>|1ZyaPNqL4{J@MM?tP@k1;jQw zTk=&^4cU8}P$zzF5&>B(Y>XG*wQwqg9^XjAFTi{gosKz?9Q+EBz^w90>6ik$HO_yq 
zL*>qu_+&#r=_F*lE??{zGcKqT5sz|Yi=lT;_qdOn{l+zjGx7_qvR}&OvDEE96b2T7 zLJTB4h`HHiXXNByGFSOTM@m(45=C=*!vg}P7`3AOo*4Oo(fi(~Xz}_!XrXl7B-o>E zHYDsgj<$Qa$ciHT@O zx(;hLb0GhS$%gLxBCVstGjuc@`*U{;Nz}m()op|rap^&~g3%EEo++2g$S{!FPyw!d zLvQU5hvJQVL@`p&wjw@txV|tMfE_x+J zUl(wKD?|5ft-JYzgP5#~i@S3+ zcnbnubQ^JY%qu;lGa}MDrgnWq@zKU*g9u74|(lCZWl`XL>2Krsz z>h}CRMlW;Pd-oa2f|faKKGnUO%Nlv@eJyI-AIk%vbNZprM%bS|6U-t~s z%f%hPVaym^v`m_fD`>YFuO23GzI7^dWfQ$`$a(t}J)^#NM{TW^p_uj84W!(njJNE@ zy1%LpqSCUn#icaYjUUO-6B9pZNA%|)Qf^kM^^B`m9q-Rk=~9~beju2Q#S38LSo37P znzU@nr8-h4qtWXv)iY<>WL5IW4^;czDjkkO@S?msTu|zP|4Xg!C}J9Y9>tODi&lIh z&Q@=S+g!KS=05kb-f_2G>1%Z~-zf|fA&q-4${s-)VLqbt&1=6F89~l2C>bXwZKjWN zue~IG#*psWOEzQD0qjeO*IhToMI`PGi}Q#6E=7q_k{*lrRPle*_0<7!JYBYfySoJo z?(Xgo+&zTg7Tn#PpuyeUox$DR2_D?t-sJb~zPJ1B+xl~Qrn{!As=MyF=bY<29sfHz zy_kmxPoiNXFh{M@@J+Y9Dnf`>BGU+cN_?kw6|#du^tue~i|$Wd^^B|(Rh3?W@;G(G zFV<93L=@S0A0pm8(J950t_LP^MYF>IF{mPxknKqd^}fkt!}DOllE>As%)_6dlk^ul;cs6*iLN9%{y3MvZ1hycRO==$pC%F(F2x6oYK zv-%TP*{(b5h22Y-LiA2*$il-zE+@Rk13fRv2E~hvZU+BSc}du=MtB?LP%+0#7|IYs zx*hAVG?B86D{rTrls1c0-edDQ?xS$H1#lRasG8Ym)yVIntH1Hzq_t4C*|WyJDaz*R zQ*Di%^!Ci$VmN*vk6_>2K?jusKTErip?a!CwEb>EMS&1EV+Y;BU)U+IS7nmSHc1jT z>PAGNtS7&4f7kH1K?y5kex(ip^Gnv#IlOC$=%p;Ta3)FmEYyOL{?>Sx*w~mzP&X@7 z3zh>!Zcmx))pB;Gu2#Vf`2lRzf}d2SdcE53-SVyKLd{msNI|VTaMQpsenpG9qTj8U zf&@0W!3{;KZ*}(r%5!tIEfK%s4vs-U0CK3w6#Q$`c6RVViLL2?;gWS9$HUbTj&k5^ z*i$_N{-?~=?(_)rm6Jr-At*1eRgm>aF%VlgW( zszCA$5kOKRj$@(%&ndiV7SI!$N8Qb=vb0h>`C57Zb%1^AQO8HFK}*c2 zT*zJna#Vcq~jNmv`OP}HVq&n!nw!0O>H5@f}>7VBiZMaWU zZ!sw!<7s!|IlHh*W2)<)@~`c2wo6=?Que+n=cIQ-O}rS|1@Z0U_6Be8N3aC_w~r!-L=E~j#yX|SE606kxY z@u26Ob>~cnw7U_wVXHs~%lBMi~_fvvCKz_M83Pq3{p&ld1@W zQXO4QEKLU57@d<64RPHEfVgrZ?+-OJB_-if*kSY)GE@?A($A!=oeQDJR0 z8My!C0#rOa#jbl^v;$q&5o4wl5crygvk=BJ1*$(MZaZNno1EhJ&)9K&Js2bh^LfGH zL&Z z@D+8Ko>e3yR5E}4W-PS+*8P*X%_yU(ZxQlPjeL+Uh$Nw7()k@bwx=P@R+A zA@;BE=)R!LpU$qor7?W*rYG;nu2&FSc|KkvukmB`%g*`@Hn{>V?x; zbeV=aM<~9q~?^EY8k?18o^kw!s^T>%0Hy#}74(b43?dhMHI)Bj= zN<&g`QMa^Go91^a57kU*xGAux*-}X}5gr*eF37g=A`|GFy%-*? 
z9s5GT-BrWwRVV=S!&=gY8|!C0t(lPm?Wm~q!kcni4j*J@S6SLk2_>nWPxy`?!paYI z65>!C@fXiFt#88BAbrmm%ACx3A4Tld7~+#Doww*L*}Oz_-tV}#APskAK~-Ulj?07O zJ2N&*JLP@~EQyhn6z?-MgZ3x=gWa0pL{_|h1dMM6a3u!r6tr%i?3w5pWA)U}66xf8 zE=hv7q?uM#@@wdgmq@Da@=k(}lwB_*i!SFo`;~Q)@2PJ$`})BPUjj|ApqTPR#q!x6 z;o<1^DWoJRu);}#0-}au4LeoY3?PEsr}B0T$2RckJ!>6Tuh$a&l~cu_Wz8Z+q}dw% zi@aX-?-OdmsI>R@rW&XB^bO9_^KO?f+aK}`m4m@ak&=+vNff`R@TeJ)r%(U>kyD?b z@g^synlIXyjT836z`!~a z$Y8mg%CmXZ!p~SmHWX#CEO`>_^p;^hd7I_*%Qg_5{%*swPv2ap-z_Ict|ng2r5mL`eek~w7If=>y!c(@e*gM4E*}q*qV44( zW5p{A&NxR#%$Bp-|Lajbu^k~Nnaefo3o=71nZa`;IKe}wuF#zrh>De{CYLL&vQe{J zi2|$aFqeBp0=s2B*hpm$@nSv8AOS zonoLa{oYK(Z`U{I{-$Gj-{bc|62C5``R+5yWoJc#h2L~$y|(&$*AWgO85kwwmH+XS zN0en;@ez1&WcKxO*<3+l#OdP$6RArDq>2CAE`RoAOdEk@#b3WZ&Kt!*gBikQyly5{5ZZrn4e|n#9{R)25b2_(44CXco!^SR|b_wU68aR{29 zOu^F6IVq(#e&(}pfpTQrLi@zPz?bu|P~;2AtHRR3TsQ}LdE}1h%M5p6e($mK{U#P& zO8Kq)*F5J;6^aUQ0O~#y#=Teu*7V!>R;Wuh^3?n*0Y#d!v1nk=En;*u zA`;Cxj-&_NlrI{Nhs#5@@6Zjq*ETnn&ayg1#jFW)d0j%5-pffd#z#kmJ4T5I&JjI6 zuV`WN9v7$1ITzOGm9m&4txQao&%&-@my6RMp32z^u1^K*zhKz-W4k^=2T4OsWL$BA^}yiUt5wZRxd4YzW>b}$5`zqzL4{&IP>$H+*&W(@L8w7eb8 zq8UQjQh8TFcSt-Sz%)N!Sjp)M?Lf+*J!sMF&ZO?DcwMx!GDu&W(&5`XySwq{qS);q z{N|b^ZGGnRxEzpmI+{2Z>SZiDdgCaePw82aeP|xD9ndhHbK}4CWE^8x2e(@LVM$wl z;o9ypVVmw`dQ#;$6_lypd|}A4eA-E!x7TCK)qM8gM#Mv)L2>`9&VC>#4QX~9)FHQs#@W6?&NUu)lSUHl0M77Bs!81TBZ++C5EUCaOt=#I>fc{@*0o4`))InN)ux0%>q z<@Q;qrdX8&0H{Y^Jx?ZqWNrr#ASL?2dn?@RkBn>wCTWg_yf|w-VTJ;m?-nzvu-=*(GFXQP!7*G$@=28V*%wzDH>>BCNrdbcpNp z7(X|vEB&zvaEK{8j=XFdRU2bX>Q!d|JJt{>i-D#o;%vb*Q9itmkF)WyS>b&qZB9SFL-R+UGBm#K6lFmA1oM zfgv>l2T@KQOZfg`0yW?KZCUPnf4#Gr9Mc!BgJs1#t@<;q?$XA- zaqf*J{&-NyX`$O<3n2rBI1w$h6OQiT@h9Pk*;Lk^KzRkm^t@5hL`oaiy)?v?8&P;t zO#RHQw)ck5t(&M!_b;Y#QbDMq!zedDGPbG2Wj+&?nu`*(kkB!jOW1K1U)*w7xn#8M z9UiSm%sD%&P2fZgYgCWRb#(UpRIRS0ULBip=TZn_PP?ATYtcn57E4Q$ z=_ma%OIb_)kloEtx3jN}dObfmz!r}1LI0@42?_gWiZwo~xJgwdQ+F16pU){8CVCIw z3iHCF5lf`yeU^bClZ{8*ezNm$K**VNnb)O3%}-dA97eLy0g!-G=OVJZ>feTigGkY*RoH>x<9H_FG 
z%(eS$Z2EVLu~_m-!|W#uv#vQ;j7k$&uI1W~L#TRe8^fJ`cRU=!Dh;wRr^|>=U0`D* zw<%XMOmtw(t~$-Oc@!b>(yZ~L_PfwdM)f*At=A6|2xcudubTtZ4^Q)rfo&TFy6eIl&%F#9E!Q{$n<*0 z2v$krX4&MSue!i`##POc{bJ9Pxdi8Or-;!4HpH?32-mc2jmNDNR%zR$ZP;OR>?}S& z8m=P1kJVKdX4bCeuk!HY@EkjZwmgumF;E=i ziN1i=Q4NYNOND$yS%QnSv(krlAGc1!A6F45HX6BDMGtE*%qdi!&56jW^4eM^i>Un^ z65^0L2rm2?eFmRm-Yu}V%~^Q!?QHC|?cHa_v4w(Q0Uve@&Y)cCk3gq;ei&#=q&<7K zCx}qo(0I)IwMJhTarw)W$&_sK6Tzf`)<-b+^`CHawb04_eaC3p1x)=ydwFv;c5BYT zo#UgWNwlC4eoP%uffCPJf8z`Gg38aFuSQS++|V&okc&?(h)=pOT0t?$!Cb+!m1k#Z z2`C$2w;g4-nF#>VT?yx~C3DKk;K7wo^UW6WX2(J0d!nM~lW!L1in(GnYij9P$$FNH zjOBwJo~sZ&ejE)6%4&+mc{mDY^R6-WIR0Qdq!8S$qp>wcQbL!JRO#tm#$~phK?eb( zRjsWwa`fmTep*DJJzs`L;mSefxE96J6NnAl-Qqzw-YYw4s?M}9PL^vVGhTLIQSV>N_nt2rieyHV1iZ!3z58&&kuk* zDB^|#2o{w{ZcT%)N{Stxc_8^au6MZc|1qxNi)Hk%o6%?Qiv&LSB`RZsl#;lKBj25) zkt6!iyID9m@;FAtcab7s$U&p`WOOpfFy_3`yxiqNJ_>g_xmA^6i|O%b9)JiK$@eqs zJmet&1t8FXvF`D4T+o0RT$Jv5);c64(3R*@q*#@xsS5XXVww+gAzn3m`C~=|_ddVH zHa1d7N|zM^O0ddjn$eh}ph!djBxRTyautMn8m ztSf(m&De#NRHo34aktGy4|O;#IhBspHx(qyGf)>?s!iILKI*?-DEbg3yLfC9fgikG z?~=sRY2O*BXm_u7P(TAxB+!^Wh(+_^dGt5SO9j(9mhAe4tMTnwo(0P6RU@E(`r1r% zc=r|-BR@NJaBXo>XIpZpp_k5S;%qAW`*Dl+05pqUc;%)WtVwOn@Q1 zFZ3WH-ua7Hyy{VrV%N*dYXRb@@C3kaX|?oYRNT@PJmzTq+cbJaqL*^S7EdbWwNVE| z1bti+0&G6Yo>co&)cPmtJGs6|S$~~6C#oHut!oR!v(R4Atx%1%jo689BbUTew`voS z;7PV8kjr~q9-OETeTr)A2B_Po$?HAoPkm(Gwl?0Qx{x|vUW0uv4ARza90UxKIjxJZ zD`+2kg#Z9yj}>TZWl~ETZj!w>&&K@r&K&LO#zKw^w?&~AFUY16ToAS?3uiZ<6$1n2 z_9kU(*Sr=t3$BeWqj%cS_H#v=-P!LbAwkOH7~`ePXa%^fgE;U06X|%;R`K|ss~utf z>!0?Oe}&60fw5snEht9&SlqvDu~3;gP5+5P2ALj5+#O$-FaGvB;@msVw=h}>p_BJP zf6=hTmf?&S7qj=f96T5plE*nsUD$wa4a?BF{!BK1R6fY3)+i1E5?(n%SUj7?tqsj` zw>0d`iBo7-GQ!MvA1%*7m(vgL(tJ|T-0xsHoz+H(^GI&JK2Z}JiN~oDr%fL5%T%YcQv**j*nNDg2&Tu zmfv+;{^-l_ccZPgQLts!`2i}>G=$@Im>vuvteug;3FuMK@OX@ zX#IGuPf@OF_mbSUQD~V}m-vDuszwpgJNk^H2imDHmh&mG;CC)>C^$Vd_(n^G#Xlat z=58_l*aHAy!2x-UgQ%jdeqyiyF!lG)eFyF!Kf?&kgZ*SwKTtK|8W&7R04gN1pZ{Ei zMK;!xFPD9#ual{fZN){K5F4LMGD^xw$%^XgRrOoPkEu@0f#4BRU?nX5g#~DNK#BmQ 
zg{5=3u0}7GFiKeg%#pdDwvcCx!LUWiMx?PY_TIvFuRj4$(IJtQ?x`&i!gg*57DJQk z%WJVKb?YLsH@eW0)YBRE49XI;3EZ_kF6B3aoFjz}m7u<`G?Yvya?^`e`NdF*%HPef ze^{>KI{I8DFZib}0K)23>S%Kd&ZCw+aY~~lZAEf(+DQ215Wm}zk;T*Hiqm6WLP3%- z?#Tp|E~JbCX&ZxXlNVbR`ToALN@9Otns^zN@<`?|c1 z(zS-rLoUE^+8aFo_SuB-e!Qi%3TtBj2C=AnqOd)|Cj@8rd8^^gEABjK|=6^yh z5?FBI;W}p__vd!(X3dFrinjPR?pkC=e$z(yt$W>prFy{x&qU}hI(je7-H%sKBc3-L zdfXDJBii6haIytIuTe800lDNlOm#ny5|JtF{=uoX-_L-lSuBOP2&e$J!nJ`7Q4}K= z!#`fx38j|k=49$h7y9Js{f1KQnt5!`BL$sH%{D&N$cE5>SAXZQp5E~XplIgU-Dv2> z&VD98YLm^gvC{pmU@57e)!qKTfTEzzcuvj7;EtL0R`*=3SQrgquQ>X*)*mJXu@WW= ztqN2H3L*fyXD_&WUQ$QjPPs3-QW7{)pa7A^GrmUyUuc46GeScToJ=4P{sCR>gEp0( zzyBKox=KJOsuEs6zW*gGu^p|E>}_+i^h~?L1;f=qZde|HudRd&=U4P4V#MKgL3v~y znPme}>(l6s3ZVx7E&f+Z-}yn-)7Bjg1N>RJ*}57-;Q8jN%eqcB8G&=NE#axWgbk$!31bG9U7yHU$*%D@*Pr70 z?2y`2RJump{ewO&FaG30PJP@qp_Op3?SWAU+UPU0DeHWrjRj7ze3k2(Oqo8<@z`)G z-er)I{ZxuqMxm6{GS~jsGMAhW)ymH~b7im#PL+3ti&(MSnVwO1kGrRZV>k+@+I+sx zc|v)kKMGv2;5kBj<@T_~HNSw@5h7{OGX^!+vke!QG5G(^uv9B2(L7XVKf|qu!?EKAm zan}bfhKU~8&|(UVFkP8ILE?KvNd}4htBEk*Qse~${dmXDp?b?dmwSiv7V6IZ6v}ARZhdX?B_Pu&4w1WrUHJ?hj+y?CgBW^yMKG zSw;$Bx{55iD*jB|bv$`~e1tfvsL0^%Q1xc%0z#7b@!dimiPH|l^AQ5+s$lejCNVL* z#j*HY6C8)qWIa~(r(hwbJRjA(#B98W-{%v$4`dhynzXla1(cQ|>NVG%=jQv=tKC8mZTkXN4M)G5dxgd4xWC0BM!@T< z`XCsH=2-LtUO0ZEMiwy}Td zZ^ifss-e!90X$?U-KUM3ri%;W#63ayE;p7v?Qo-5<*Irh-pX9X3L!SD6a>hN6x6bL zCk|?YK=lAYZ5yJ85D8>A3~FA-%>VBjry!rf8C*&_xij`&Zr{yny^406q*E#T8>Zm$Mu06IG@PKfi6*m9kSyUNwt(*(rEvVLaS!V znV95DH20v-uMOmk!Z6adl@_gkccK)>h+|Mn$q8cDNnh~FJut9{PMvEr(4A=Yr!~R? 
z_)+_&qB<^^Roi`Ux;7JViehGCIk*sOR}3}NSU4^=Gq?V3KCD5gk(7D6w`vH?d^{|i zrDL#|v}2Y(L_^2_gqfTrIDTWuzT9J9cR28Z#--@S?R>mbcPv_l3jiH+^WF|6eIp#I z=D&9R6L`so2u5nD^Wj@Izx(y)d;8@i??}lh z?!>$i7=lenw@H4ItS*xWhqYe}2}Kt_eVh4-=m==q8tWQUgiV%)dOeBR7s`kmO4y}2 zk|@Oxh?-TtBZ!YtzKHRWMlYFi=u5ZG;#c@#vwJIWQKyv57kZTb{Ood4shASL`4M>}NKKNqQYn^;}k`>WcDz_In4l}4bcN~JPWU>3n?jE&>HMg&fz{qS^Hzw zaUj#nN+&dG)2h)`9yyyoXV@w+DwoOG+vf1zEg$U4+!5Cq)jPj4;qtIU*)#Xo92Au( z%;+mISNR*fjc|{DTaTKt`L3A{W|!WEx+ z6LNvtM^~b*NOsC{x4hv7MYSbgTD^w3iVFCJH0Mp~hMwJH4n{*MTSx?6N1wV0yi&+v z4jTpl0QDoyd*U`-YAj7WJnWLU^UmH^&VW5%#!R47y+_&w>wUH^2=}X`awF2>>k#22 z;TQd;Hx@r^99mVNUNipE#^`8+U_K+m&Z7%Ge`Br+2Oe!zmg#vT~a*+5hxy?RBXj@61*(>o{=JUI*Acpxq(8QPRRp2tS*69 zP#4MViNoU4XxD;}Siwk%h2HkRek$g)7&#+Ll_;7H$Z{~C82v!vCj8Znvj-f6X52X5 zo{foC5q7WC*mCpA$H6FQ;EZG^SL@fhNqW1`OXqDNVPOB`_1pRV*KWFXfXgW`ZS?qM zqe{PijWTnk?PQ(J$ZYBCMH7(|GX4rG(U0t^- z@+R&P-{zbb$aG0$OsR(zYHB$=E{856NQH;zN~Vr0CTLqzd7Iiw8%%%AVVYXS%Qeia zPcq)}Ky9~{hCE|TEoR2IFmL=a7FsZ#Xfg&XUPNZwbuzq>1}6=Aoz5NI!fLxAjXVkxl>OH2KiNVepQcOl0 zhRXlIHIa{!z^|YFH(WLGo|8l%fHlU~Tf}qRa$BASw7z+{DzVzf%0fBVzK`$~)3@kF zB=E^Si1?$*X4}>sb^ujS>xPq~SH?fu5k+AOrcp~I>VTy+HX<$To=hO>O*H?d2mV3%3D;-ARw6^(S)RcMJ;Asq^d^Z1egk_ zp%Rx?+lD^ww??fJ%phBFDL$5zQ&XI*-f*nPHy3UW8A9JC-&uP?Oqk7Q4%Dq&GP`~_ zETk~-bP5<3qW0Fw6;!F~CFwPo;#D^3?d2$Yv#TP}vg7d%6xEkWGzmLd(hh5=L6XL5 zWZOh>mj5>8t+jD0=WocZLJ+(v8e#NPepFhvJqQ)obbVcU**u7?e+i}Idy*~$u! 
z0GU|E#9Zr72lt&dQ;7IflX&k~*qt}fb%Be90lR*?Niyab zI5}0#sYIfVRl=o;ep`vLdNB^d6@{e>JZwBzw)3D3Mjf5?aeauwB!}b>Rr-3inPJ1{ zGm*@YB{%qBy|HND^kD20y@V=u3)aPv4@-6`w=l)oF)l;)QaX!J_~K{o0=ENMV}m-JP(9R(Fo`b^xQue zvterRV`AtMvMT|xB|r9hx+=*Ki$W(qstC=AXI_(dLM;$pK189GBgb+-MWjjCIaS1i z-A^gQ^q(=RLCsv|?3n%PI0h=La1U^rrx2-sK>F2W-UAYZ##kVbE2*nP`BUG|)LSZB z{56b~l*Ar$KRKF9AENV$4ZL}pUvYuU3iRY*x`IoQwr*hRj{TO$vw)Dz=N#Da?9^Mu*Uz9l+7ogE>)!6U~=bI_~C;GHA8v zdp3N_#_kB`10KsIMRLFk`>g1OHJTe+s68O8x%3WhJzRY3)@)S*51KVoD?~Cln$ZDI ze`YTU)1zx!JQ`6{zEx7Xhd#?Q3e-FAj~d$Tj{y94zdLNnO;6&MX~V;hsAA*&{AxW{ zdr*c@d@-5>lj3vQm#Zf1&M*KRVGGbhOycf1~E` zAR2>7qXNQcgBQ^0U^JVRhxrO`VXQldFWk)6lDC#(czq9s{MI&iCH{q7whd`4gB*cz zNlS}b2p|qSeaN9b7A)qf0NePC7&w}tY&P=!p6mi+1;)FZrF0T` z-i`~cao4VRub=Tyi#WwDv%`&gY7vqEScc&LVSFM{48-fI3)EWB7W|64wKo(t)Bgn34`;r(qUg7PWLV`?M> zopRZH35>&rM&x?{zH9U96Z?J+v&~jd#UIn|v%0plpZrMh3JJak8K`^v;k+^WuAICS zUg=`@Xw~F0)Bg`s)cj9Ov4Ny1*o2^p{YmCjx0TrWW`Ls7T9{7OgmBGeF=IPV_gzqs z>x*eCa<MDe8+Dn|4nR}x>&Fb$};IYtb5JOb45r@ z9@bij2EI)ieQ`O*sj*JE9Q48rAf=BK@>QjSm8m%h00{;bR3qsO3m}0d1V9vP&k539 zf7tHmeEb>jUDw)j_BC}mr3dPg7(Bgskf}ohOj+i4*GN6+yTK*%aoX3y&$I5QdO;#E z>Tusl;^#MTDP#Z$*aIU`F7|mdJTR0eFrGx&tm9>_#|PMJ$ZAj|hKZMHkR3n(UPY@2 z=i((hoIj;2E*+iUiK_9&0#`5|vbO}+TTG+0s6&Kx7z&5kWHkZ*K$Ws}&BfETf0uFD z1EC+PsGi2lM|@3Gp`e?Yx3GVm28lcpbK?3RL2N`U& zuYuJ{B5oBwQ<8&G@^QGJMl8I@F5HZzGf?EcFj^g8?Vp&MOQL%o8(~xsF?UuUJ#C7G zou@p_Thf64bAq~Lm%IB~bxc|lo{NKSZNLZt%{Ec1SKlK0r{aXx1M>frd z34?dt%x1~%GKb(hSZH#zy|(k2{DeMFxRjYov%@do($dGJ$5QF16vywVq85{VVvcQf z{=s8|-NnIq6=7*WN!zaL4$PE&NQR@e0H|Af-W4QgL-s{3W99`g+TM|Jt6GG-?Zs)@R^iAP>>bfovE~+`Z?r|5m)B2UVvCI?ctJmld{?eeV zV0`HLB*0L)AenggXO_5Xqs7nUxMld|+ZX@iXg_BXJ^z2B|6yHN<(ioIdpRw*E5xDK z*|jl1e<4aCbpJ&csFdvNP=^Km#ria~otXPtj4Nu+WjXCPVQRX!RQ-Z_ zHB1$qP}i-ok+d;zzk%96z}s&{iM{EgRzIuc)6ig990bF>i1Cz7f5lpvVVkp=p6s`9 zxzB)QO&<7J0WZzXSJM;M1RdMz{RgD0>B2iS_x^5;#GRnpN`~*K;CK!2nft*1IP2ny zXT@SJgko1Ohkb^Zhu+tTzQHrf-w_?g4*MbxRZRz#te5pHxF&6*VMOX&sl=6Z59XN0 
zZ|_tO^P9Adz};x6FF@@Owd{I#cZ{Bh$a;Qbl*A_iHNBVXh-JDqu=cts;6Zgjk$Hz-)mzn%T3)Vy=7GcM2L7rf}Psdktti zLy6*=Qy|Et!JvtA(3~r>WcZ1T5y$CxZA#<)>|~V&qyYI2+F3@wRRcyFWy{Iv3#x^* zr|Ap?V&o83upZYo9KTyBsT)ElXo_5Zn}h^dn@9}ra~Nb7ZX`AQX$7#C@Xyn) zv-4Dz3jpMuv2q0Q|KaBi$8{_VZLK};jI0tL|?SDaX*l*#){!;lsWWHFUe}3^7>roBm`RA2fV&(sFQ1=%VB1hiyNRy#K<%@I7NT|lY5G4^; z_H;)GVu*#(Ax>gN%i%*ekclyq!~}#*`h@Ycb!n``ra_ydbPv z1Bj^c43rM)v55AjRsRV~uE$XQ;@FFoa||87a%4H>$K?Nr+z8KW-d4C7ha(VlX3{5) z_`E05h>97X+6T|zkAZ%zp+bwx-SMgr}Au$W4A$#)RyY*1x8;d9+X;4znjgI3fmq3yztceA% z-BU7xG|}YLvFYXhi<;BlmUt1sNN5XWfK#F7=URy4JD$p0wPVQ(p&@DGABqA&O`OP} zvveigw)8z5{zetmvN!TTsH#^4LpGsl|5);vJ(|V8%OcJY^a@eT!`x|=8f-W{Mx!ZR z;ImB-C=&Aepu1Ovvw4F(p#vUSwJ!d$gL0;4FBD&~-T zH3EXMad(S^97t5wa&S;o0sa#9Iu1_0a$Ze#qOX?6{=h7VJ=pQewb|mQVmBs z>c=)?1UwG1Z8DZm1<&t6S*;QkErwc$AI=1Y)y&FAc>MFs@;(2VW*7D$ePu8t3I3K_ zbG{AR^ndg7Pd|8CP2D(43IDayKd2MAdTVXWCO{^l`xm>8eczK$E}tAfs9Jp1Z@ygR z;B72}6lkkmf!Z?@erqPq)rj%`>>O#B#2kO=EZYcL*{M7+KT6tfi2jpNrZ4}bue*&0 zH$`_09Z_DsvO)NgTCV?7k4>5})SU_aUxR>oytY%@v^7qIBz7;p%!F^$|8Cv1CcRQA zXVO>PSeFM4U7OTZTp#A&>|Nf$84P5M z7+?zs2PPWO4aWSxY*v`_XCgPw#9b9Zl~+KI-Nb&OLzFike{vXVtGrhTQMMpLKno!< z>F<=CB==Sd@yf~nn9>5rw)CiaS!-6$tvhM$=a2sNrw7nUV>XPw4W4>1-q--Y!Y%uN zhk99R>HnJTOUeb!;n4_g_F_5$t*CXceRqZD&Q8^}daX0c^N!@WbZi$VWjjV@f^9s^ zsXqHnZzd`$Mg#!%P~OLEWNPCBSp<6Ql8jQx z=V$BSY_;~kYS`+)r{1g*1^+bAismgK5&Wy$6VP`TWEGQBjG%%IsyEdgpM@gL7Fq*$ z_T8@1gO)M7WMcheh@xn)^LV~6h)YZY?OStHw|tsRuc>gqzur<^J+%~O8fSxi;VPx8 z@JchvULPM><)mN;AON&%#6#*Xc#BJAD?o+Rf2OKG{J}*3zpk6jJAQKe&vW~~@xn7d aAb=icv#aI5t3iogfQ*EKc$Mhafd2#55IU#; literal 0 HcmV?d00001 diff --git a/doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Result_ratio_test.jpg b/doc/tutorials/features2d/feature_flann_matcher/images/Feature_FlannMatcher_Result_ratio_test.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e4a88d0020fa3aefd3260a331abb990da7f9e787 GIT binary patch literal 80889 zcmcG#1yEewvM4;b`(T3&?(P=coxv@*yF-v*VQ>iU?(XgyG`NP~P6!Yn2_(Gv?tOL6 
zt9qyE)?f9%-n(XcYVGOn-MhP2cki`cH(vJuxXSWM@&GtEIDjJT0K9GkWB|yBNXSTt z$jC^@C@9FN=(yl)03Iq_4uT8<93B834-Nqj?sW`61^~bz z!G!%k01h4jCNBzXkQ6q6@E-WGH~zx`_!LTq+9y(9)4J?UCz{sH!Rg5N$kEte~hdDh$v=u?5_(Y>C2J zWvA-!u>dXM;iFS^a0x0rVsF@3X>=G#1adrxX^Fd39+l7hGYGs0K!kAsNR{B_G2JQv z!V^H?B5FKp@cu|5QwmYcCplbH<&wM5s~3rPn!6AT9P&y;RtR`CRUkY-9rj}bByb@=y1o)z5OeQrbNBar_RG0E zQ`5)VyyfiJT&fYZjI4yJG*Z!)M$A~a2Pm;ZNN{DA=wK|6s+jw}vbD{}VRQH4C`;&K zH~?hpAFT>u#iuZ+aG1l(S88GclwxB#P;aHa2ZWF7rA*6zK?sji+`1CE*ZpJyw%poN9Dwgf|CXv`)H)k@NO| ze02iy62h$f*m-GrbHY6PS@NbLJj2e@aHl|Jrad6c$^a#(vZ}C$b5IN??A%37il;d% zfU3F&pmfYWqHonadq2!Qm3Y(yU?Q~xi>-((0svCTsrkZiX#U8As=S!p!4xO6wO`7((^B(mpvCi)H1B}@fZ>U zASYwd(~!$Db5n!MrJL>+t7I+kD29{uIp0S?!13n8SS2Z%TeKtwBF&OWzytKiXj@}g zNVJa{HBbH|Fnf!M-f0i5W`9?&LXvk7Q(}Flwe>}SKt@1SKaiEXKo57BHTyAY31n-- ze_$hOxtXO%Jx+vz4b+ka>4H>ch0I5WslkNGMJD{J+F=`H3?6-6^kL=j#mQh?e4B|v~SEtHl{bRQ9biwrem%aLae#eTW8duOJ4J}B(+)}+U<{>oBFpWXgG+b8dj z(jW(AmUCx>;ZBUJp2LEiMeHUuFDjO$G?r67lau(Gm-7@Gst0|EB)Jg4K?j;;UaF{q zIQCwL1BiaJWbw^3(L!Tl!X*wx=a!t{jH!{5u6-Tp8JQG4O9Rau!se}?_FW9 z_wr)|Qd$@(|F>J^b58PxgovQfak0H?cBix7rISt1b$A{kv8A& zn7-7r{hQSkQI+pJ7I=&!=jdW^wLaf3cA7smT&iKhbeVULpt5$B!AM0iZ>;RRl<$?c z%0YTGMx1D;Xh5>1B?l*;J{=B-_<&D@l0eOrlJXwiAsIK{VL3^-Rjfx5!bXr6gF1o- z0QC5j4+(qGC11vs0AQ{uV-iqelQ|1f9AzYT&}-4s@z8!ebnaIxHOSiRXz!d6wI3vG zR@OdEU^D70|I&S5PrjS8)$scl>tcdnhMg4gr6T(1ylRu6<1FbXLrsvRHV7UYP6c6% zh8HS_i@~UD9;+uZ8gzt3t2hc4jAS9-l+CLFqf-m7XC~6UPrIIknF1>K!?3XP1Sixk z>q8h?mk}*%(tKW!`j}zn0!}GAv|shN3q+^W#YNcBCF9|Q{LbVuX8ub@VBntXT)r^d zV{hb-qtw7wX562ZOmCSok(P))OLZHf#v%ZafDYFFQC31yk^{OT;pgFcEUCE)vKIN& z<|RnHTtmzm(=w7;SfeFE+S254`dVAzF6V0@5r4{?>=|Yv1>{!e4YAbQ^4Epjo4-- zUOKQv8IH>dRW9#z&4^D*Atgg0g;Ww?#zr_DbzdrcAS2f1BcnzOUZkP4%bxI%?SPO_ zm-r&_#xuf#w*7dO4~mc}D%lH~Qk>QNC=YK6;@#EgfJ;`+AB__Z9+h0r-VAKV~X@N=44| z(^{4-Y0v__xU6=VEc61iG>#Vwp@JOXQEs8W;{T6Gi7RxWnXrYz7gs-ijPEPX zoeK%&_`;DG;wJXsu^=r(I;fM|^f(QaU|m^wcPqjWemm#+-0c!@&&aUwmF4#jxq*7` zPo(QPhE_;-Ixt>kf@JNEHWhZnLzTN zPLBYt9|wQWb!t4c}ZMrU~L9pAEsGMFLRV3 
z^l4V_*=!JGRcg))+H6~Yqpen_*00!Zr1~MPqe{)NPbD%%vYPE^(O2YK zD#+|rcey(E;Bqqg!(P}|7EyJya?>5yrs--ov6(II6&EoQcqj2oD6@ntyM`_pu|Ey)YlVAgX0M zk!t4jR(|qLL%@=XZ$rX0MQdX~??Tzra{iK=1Ky{;hwceeZsQwb`IY)bv$hWp;nZ8l zWsS;{3ON;lW&>Gz4K9zLE5x};XUv#qB}6qvEYGJfpNsXmWHArDF$k=VuZd0+9E&ep5@=XEBLNY zo_mx+)DCrVeqKA09}pF-RLR~F%Iaoy%vx^|cd-iQ^*Jw8S**xeo%Hpe>lVIL!}>8e zp11i!-6lKo5N(d2m>>O!?xY#d;Apm`n3`)3SfwYtN9l6_%N!AbQi!a?(fHWpLRurS zLRykiD5wAccqzX)qr@R+R;-nD!7xIcjDW{H!H~eLvr;jd=CtW&8fD(y?!P|cp)f?|Mq-1<`_asLgC@>(5K6kBi}uj zH7ZNZOhAB+AS<+Z+*$_9vH-AZjPHIA4uJGeEeI=R{|gLb>P@jcgYx2>OY1%_7+i93 zq?A|Xwzbcu9+#i5DVE2bdeZ7H$kfEB2r8S!w5uJ)eCSx1=pmf`u&wiLDZY-ioph*7 ze>8oxV#!;T=63lb4jpUG*hg?bUKMocScWVB>`Tpw*{^M@v zk7=ctik{TGA34I8@~wLO8^(k6*?B9|E7h%DrKbfe@)bYz&uUA)n?aYHeLCN*&9Ax$ zt<4jw>i5>Cuj$id$qi#^PYsVt^JrUU9S}%noyIZjOP1M@f4_HL1mMDa2o6!~$GXYjokidHoB6 zYjI8`07cvDWW|{rwtH2AtV(=%2IhQxY@?X7)2ojSY#anCEb_sf;$}IyvjIjLZyxGw zGiJ99S(O^jj;R&n<1rWw)cNY>&P7xtFoms9uxl^IyrF=H_$rC{A zp*B7XJk9t8w)`;Md$v1BIjrV}=@VB9N8;ZM{)Hi!uaY=WMv6rS2#s_LyW_BljCx0? 
zUVdeu$&@!V5r^;KonRx)@m=NT6nV;1tI7LAY)Ly{gj7mb!*@Q{()2-dq$!*Nt%DDG z+ggMxRRjxm06vGIf06$;0#{)spdktWc{nmkdQ(%z;7Bz>wZjpIlskdI$jgyKzB`h> zN6tu>z~pl`yZy1VMFI0hMl7-ip6VhqR5s8Mx02jCo`g>m7A_D$0YCpu_g@$pd_j)4 z>_|#FhOMDBw8)iQX%a+3m=L~c-)6#&P0N&w$z##HP!O{2aVrNx|bYsz)wj|a_Ex(G) z*fY>#$KY1pp9qZW8O76jiz6 z<2^|6%uh7LL}fnEflLLItuXJvwB+zfjC71V^G7WPeA5TXB(zG?^exTH#53bEOhsH$ zu$BxWh)-tjKM)37Cdu$AB+=3X0aCI;qnU!LR^a$9b$vc%Y0y!*6&n=lfQwJTJwRQd zsKYZANyCVHjm3jcLjbJuKnfog_D?g9kPka4`VW+SD*c@a(2-Iit)h4%d19l%TvK$E zhIF|8JKd6C~UU^YnSdPjJXd>;yy@h?6A$fh>xu zk9=6rfFVa1-Uq%_U%ux0r7e9QVKaX2<4(J z)p=fJ3^4@==pXi}3rqzS5k;wo#H30HvAr!+59KjIe9~ITm|R2uf%JJ|Ua}TyqVu_0 zDft(!*^gHBY#E7S`=0>j2QrnyVd`CV(vIZa8(HJ#tQj9ZQgnZGq~I!*$A|KvM+?YH zdLC0F!a8duSpS)}dK#?G)Epr(ipQJ`86DheRC8eGs$SJ3OPt7fPhTVrc zPks^w%-donJ*oO&P_Y;^$wC~5Xp_qJ@bUYdR+)?f6ak8DB26o{j|>IxD_{f1{)0N; zfAu)#SjAvVI*o4$tNWT-~9<6FC}{d z5dMA<8P!YjKS*rIhQ+5|WRIJDVnJHukgkw6NoG5BSVTt$vZ6E*lBpB67#$M;Y;sTt zk5EDlXhHJI;Kd<;zHkfNq3V4GuKlenEhrS$=jOr%z{A7C!6PCe!vA-_8x9_TfQW~W zM1TtfQxnqAa`SNU@=5V)BGbu8%fi~qz|J5Hg+PKp5*-GEn<+mE+XiqWLyc z*GD%+dg)ST$sK}U?1!l#oR(kv1QYQ8e_-uTHuUf(ss2`owIGqYZPL3|e}l2YI4lzXlToV> z>4IRdUp|PqN+bM%^t)^68ywis@5rADnuVwrxIY?-7)M~g>@*9pE(rF&NC#jSizNo# z8Qy<7)nLvS>u~0?RbR8dKAtD_O4M0XeFZcN2Az6;GZbC;uj5+uhNU^jlQ$)o*VcJ1 zdum=DhUdi}*$I556;_*@`|0JdV^4Cf(OEKDeqzUqvc*4*Y9(MRbn;F~l$jUM0J}O( zyIw!D!Ivd({#SNFFX!EZ2B*&5ss{Uif6O-YT%3I7PQNak(G_bq{_lbRy5vFQ=hSaY z-tSsg^KQbV9Z@;N0dvK^c{yGVv-2*(*|YPuUc7k%>Q~?9ufD~6A?RWBvHRZ#V_^4g zgP&9W)~U$pa!%F!D&!Tg_&ebjj@I|L<8OB3LUG_8d*=KR2s^p{{-sd3|6UYK5}X!X zsYdqc!AJf(0iql0`YV9+PmJGTHw*@sWCaz*GV35i}}xadV5+p zS@4cBU?<>|slYY+q`JVi`fJ5Iym31h>UtUg$t!^N0fYkyJ_zCe?ean$|APB7qWb-4 z2!+hk>Epl*4|el@K2xxUKE!1UoX;H+`u4@{=hD~pi&sF1%1lp@_%w!DiqIB1D$}E5 z7Q$WC6uCo3GsR+2ad|sXCx6iDnp+}{Cwd`aD(qM1orKoknTybA#>2N2Uq^xuDGH?v z7>U8g`?)VwgM}YFUIEqnOA4PZW?c1gu=9E*A6@|yp)u^wzg;}(v(b!MktrM6L6rq> zy{4HH5~rD)mM(TO+Bf*_NewD9u^FO@wI}Q9bqyz(QCyqvu#Gw@vO>o)BX-L0P)X1YuK<~4yHnz8jpIFw%r}u^rCy9w9l_|z 
z>HIsHIx8F4C43`KgQL|UKX{@J6)LL)YQoL24JC?S0hF%*3bvPRdZGmsS5zsq^cB`` zep;Zqv>wCvveR8T7wsH|z0RG@K~qC5_t`PNG?jwrc@^QCx)zD=(p99mNs1d8707s1 zR`T!6i;`Cf^K7&(4OKw4#J99N?Oi_p5}dC9n9fusk|P$~j)}CM3y_UokpG-FdUo2s zboz1=Y|L-oB364TIMGB;nk#1c)@<(Q-+Z*~2sTyg-^*tY3H|&END56zdd{FYmK>V+ zRo`(d?KwKH!o@L}w!M6vpEe;A=U!%2vrv~h4{9c>wq^U-pbzpimcVbG=VwAaYMIf? zskfbKZJ!W|(_m`9i_xa7XS%qk7P7BKOo(yxG?6$^oiy5wm~Q@sa}>BxHW9Jxc1(Po z^qf3NEz?P>?PYsFP)JB#>78tU3g3hvTWE#STrxIbY*-gj9i2YHjA;FQlr4QjySk$O zE!=y!?!V-H1;i&kN4O-9IudpxgN={hX)Il~`FXiBhEi3(kyvnPU^R9*+QigQzgVc- zF=p9Sprl}WyuW4ot#9Sn>sgqI?G=Au1fWIOF*1n#6)nK$;L=igAoET5s4H|Vqfwxt zy!ee@ zyQ#JL&c4N1&wvHb5z7>VC;Cq=3T?)p*;$BmgI3it1gXsf$4=8lSAH5YZ45zd^OUfU z8`FY(X!BUE|X$U{EUors+4Vh?IgdL{@*l>2Be{W&N5-HT><6Cuu?~KI8aH62r@E z_ECSb73%h$X|QdX+$%nFFomD4SD2{#>WR#16$LsGw1Km=xvmMT z&;E(x`JFm1xJyO8;@B$18d+PCq_3}2*ha;P#U0_U@fTImE5JXc@&_X#WoR?Ts9%bAw1 z^j1w6kb3WN=V3L|lE*5_*1duni}$7>`)I&9XSdpNtnP`Wf*hr#v)D1HS`AZ%78Rxc zp`qy&5Q7dUFv7g8EoiOb^I*8w+=m!C7fke*aVWnK-IaefO4p(ypR?Mp64IQ}oa7FK z7_Zyj8Nf$5b6(HKWY$S>$fKV?Vnkb%W-ZNwZ#RQ;0A2}K45R>@i=qpyOclcQdTyK)YCht_3V@S zHNW4f&viGtsSyJmIgEq6u&=`{8p+)j&jt^vLj3>Uao_9p>S=$2^l(>I5ft` zRL0UTn|ofYY<&{^X# z!8QvW-w~jHC=>p2``JSN9s}||%%ERvWJ4Gd3ANPql!HitzJc~c6t?t=8$Y3r8AnDP zn*-mc8^$Lms%z(p2;9KD&euL=xe91RyzU`qXG%#Q>)h(04j-YtqbJhYYAO%&qWsFE`Dn}!?-kpQ!->>i zG4aCIA(?Wo01~D;gV4~~KSmne$}T>QoxwhPzy6k69m6cI*`v8>Co$)TbBD0ppbbuk zFNeTVj`HkDHRnY0?#`&zd`r6Z-i}*8UZyWU%W9@A{<4Y&hl)c)IA8ws%5jNQa>l#_ z>*BPwKjew@ZGMm>#!Ym?{uBP-1KsCH4P#U_~BYL~|<8 zygpiqb@Vq=9+r@jM0Fmu^1K9o%a45)Pp$Q6sp+lwai91{`AS!aJtaL=N{p4GHuY-F zI@l!B+4)M8@-r{d{&FVyG05$nzf@G*M&B>f=)!*W9jAxdsvn9&TK&ST$VXQ47PD2_ zIK*SkcS)GD(YxY$y0bQCga z{=rmL3=*ITRIm9;^V7Te?r)|7?9SND6T?@tEju5%oBXtOKkK#D@yc+dli=%DTWr`LW1Kr2{KLr7NGA`9-Lo`iBoJ>z1P+^ zb4DV$=6ONuWvDk z4(ijX#(FkUSH_CI0+A>wv^wp-55p$x((XlAay@c$2$I2B7tpo(z+-1Dx1W2+JbtXP*K4vtd%kLjjebDevO_rW+X;( zZ!LBFsi;qzvoCJ3q6*vT2wr$^c09}5dNVz*B^_4OK0<_kPo zF=P@j%!+#OI({nJV$N7DRyUQr6B|-dEi6?#{FD3u$W$uYcP4r8GtMpEuon`lSyUi1mj}ZyK 
z9+I%I4yY&96u(Z?#y`jkLwmy3@FX#;VR_@xaf(d|V?6vuv0TGF>!1|g)X=2EK2yh{ z(fqJ>rkcUj~9P$FA2ucDHh48Yz5H9`!8Z+;l5JsSln!O5z~q{td+zgSD_P{RTc5jDFag$Cvi)wrE3cUo1t#PBuhOs^RlQcdtZOL;AIND!Hvd zOCUe|S_kvlWrtp-fG+_*w$P_LvzwG}Y7Div*hZvX#(D+;W6HR1{X!$_(=$g~BEMZ6 zm?Z7zI<1vl6=SQV&cnT-k3sfjPTy%H4N#~z5L6!KnNN+RH|1wl&UYp0N{VH z<(qeU>GV1CbM1HWXVllWLx~c^z55(FGQcP*98P*^-CdowpL{`lS2lV`d&(Fe1RR4! z5v(?-1<-zMfi?h>3K8m@S|-nuwJaYpbTTkR0<#yJ3I>X9n=cvl*Rd5hr+zIH3me;aNw8+eaa*QF)MGk9UW8k3pTz8JlnRKPBF z{qBT>D1JNJSKRk^{9Eii2}s#1AhGcIZ?OpZpWic1?4E!(HkO-T_;;Pm`;r_xk=waf z*o@VmD?DpF&B?b6M5#?Oz?*U@=Z$ezJqa5vkJw$0gmP0S6}!1s`bn;ZUCqwYL_1ON zTWkfUR;$)F1RZ)5Bf#ljYY_g(5ucU}j$$67=2^Ze0qY8Jy&MO#4Mv#@)zx-3@|`@6 zqfg^PdWW`FUm$uMdZBEOr(dndn=|(BrZ3;oSk+AEFtU8Zh)&qTOBR|fohAt2YP+bu zzOWbLLT-KqXq1Qka~GvJu5-sNEHjo41sW84L;Y4(w)kns>T9ySWNe*jv)o*FOZw_Y z@}+!DV^iVrQF#r9#rrnIhN4hL4ykkGd3XJHc08#S?N|)=~m3}mfY}`P3fOc za>LVGyTXZrcc6eBhf|N!yvYJD%78vE(k3&{A`R||fNQx6k`s(`Hv~_%dqVYtvm;$> zHSUJ-1Xs5EbSRq&Elb0^D%Tcx}H==dkYQw`Uzy$}6hJU&zYozL;?1%I=uau#J)-egsE+ zl65mdls~Mt{s33$#7?iB(PWy;L+E^-?e_}UpsoWw;;Bz2O07!loFAUP0?a!^(if(* zPA=^;t&~MMv5_>WDpRo8OL%zZ3uDqVHvS0Ir2Gz481Iv&?EVsVaKx3rWrziA7J4dk za#K=Q;MHy4+*?J!kCe7QI(>&x8tU;eFNCnZq`j58K1g|*{;MNZzDmg^6{{#ETkun5 zP%4`StEI6TOaDvmWpS_1e*UR;W5QF`X#UNz8{TcB2IgT+25I=4q-XVXQ3n)m;lAAi zYxGr~^h!VTfx|X1i6aC*(=U*-#*d%1SIf;qSdv(yMr;E}Wc>Xbw zE@4KPuUs12v6Nxd<9w6cbuquSW}*36`y%r=V)JJ6>!ga?r@MzUy&}P_c@rG?S6Un=NJuNl8{U{h>J9nx*^;{o&t?UE#dX7XvvQp@TTZ~0QSDEm>|-WTqhgvmqS-Bmi?-#(b(2@&-vrNR*qLd|AJ=;1qJ)^5_tTS`}=;;T1QZ0 zk{j*Y>mC1cSs+$-Mjr>SOp>ydjR?FaUKY!MKD9Jf`?Il%!PQV=z909#;}gp;)!RP} zq!z@?73g`}+-J(H%}|>c#qZBZ5`w!TXbJT__36hy9D4&u9$=kyUt|`oeABM8mvuLZ z-tQi+-skpp?w+h6hRkd;ZK^@pwraZLv)|^K%?Sr?GYy_7nd;4L$P$u_kwg(^@ka5N zuf2RHR8j$i;Vlu9xAqP!Qn27Nnvmv}5Z7(6(K1DR>|XxcR)NiH zKMi%N>D|vX!!5(D{8AyYWmtTHMc0w6G>pdb@wn>hMyz+;kLIA@KK4eX%+SoCp^9E( zX7ufZt2g6DPAS*=%5OikjJH&=#kiInZ#9dgk8tYR_iI=2a+?K{(dt*pD4-;1w0^zj zq?qne{1uR>7p%zqTW&06Xv^x8)v5a-K^=^~-w3@aM}iPs>RV7}kmuGyJHEs!tvoH}Dy 
zUY7cP{uHI(7xURB4M7VGi-vJ))*Zv>V#hxz{SD?ZU#LltByv9xYV=9!g{UPh|5O)o z-15PK?(TQ~UH&U>)SmvwSPZ8RU3O>fl(b6Uoul+J;d=UN+Fwwq?QM)Mk(N!zeUqvm zntjCpb?M7SxewxG!{x`GrK3TD*vBLStti`41UTMrfj+%;h7|3Dy>SLi84`0g4n z7X17zZM3wOS*tB`%FfqDog{2eW2`zIdkFaVu`#e$WaU9h6cbHX=GstDs5Bmr!e_3+ zkV*s9`aocMTzcSeelqxi6GhYTwE7hQSuBxl)#3~r)(ITDcM?5YdL(b*xu~$(xXe#fd}?6rE(chB1FE6;GRiNmmFkn4|6a*Uo8 zY&SY2V+j@ZZa8sHCY0-@>mXJ?JoxNSMc%lt5#NP@SaI&{&g5t(HCtuJB&58fQb*KOUA7#a z_G?@s#btc$L@xdaSN`$E9H}8AZ=G==lf?mp$j?GOjVo3ZJwQ_1g;lJdox8fspHPK3 z6QgxC9kuE?^>0GQGX$njLYI^i%`g0{W-f!dl_wyq0N^Icf|W=GNA5Uo-}t9C4qe}6 zhkPC%s-0M2P%3n$rBmI=M$D4ml6-)6?$~-jUP>ryCHIc2xq7s6^!MLBl#*A#=67CZ z(KcL#(v-Z}Z|as_R2Lqujjc+Gk5&^ z?gzbdAEYTt8i}u~e%EeZ&(9O)6&DY7XhSkO*CVMh(SLM9Sgq>D2Tq%A^z^$lmx?lN zxbcJP8v0b*q`2|ijEuL3?T$pE6?Y^!qt!!*d>F2+e@wz!>Z^V=S+yKKu^C6AoissQ zyf5XtU!zvm$tpNFY+a>IeN>DR3$<#}Z=YrMntpTqDQ(>omM36x7;NlYS)p66?XuhM-RW~Q>e?>eGfZ=aY*W@Gg(g4=~@b%PwE#Gw!IdA82NxfgthnOK}+Pi5`8 zeVV@)C}N`%sot4rEQ*{rynILKD6}fFo#Jce+rK{QUWgUg_@_L?48@F8<;ThF4r2&a zC{}27?PTREfIgI{a4|bH8ukM8jwDccjnLfJyp!J@y}38&SQzj%U3`)Y)OMS9sstF z4+#Ms0R<7ZRSyQR{dx#^)c6G4JW_~SDY!ryX>$vY;>GV^-eDvz8SSv3 z(SNOPfUSd&fYW^R8>`X+_fWd7h#8vqY9a37--K@>_5`p|3E`v!i$=t1c=&3|)Yoc8pn_tq=CZ?|kYoQ?LIb4}EnPYd5{ zYS7=MH_ifc+?kq^y3$t4!`la&KZBdvTM61qol~e&skx^Q0a!+ouCC&bP1`IYa|P`8=>IV zMTE(fS>2}a({Sl^KC&;Q6PVhodh$BNPQy3u_nz=Z5$+HldG~2;T6Pp^jMUtYM5#p+ z`iYej;U$fnVS41%zz8FUys2`5hxmVV8jaktttyL{(#!EcAG=GFB6;!kV@?h$w4|Id zphm<*cWENX!)+i>@Nkhnbrb$=#fE#yrxRvG4jnbjx$IsdGhOmG0Yi%>1WG&FNeb_EI(oMlRPx55W~S*M${ zO*T5M(#1LR!EhudZ-bmnZjKf;;E`4+Pu(ew?UX%CAG^OO6<$~aS2y&J47#XAnnU{t2D0)i+ z7zZ{;F}O<}uL=162@&j<^@YeG*>?jNM)Sd`)L z@p*O&(U;?jR*l7N7R9HUUiAE!X9$HKvCcc*>rGU>acE#4HWLl(D=;6dc$aK(k!B~1 z8)tBA@kP5w1~-qomj~BI4=9lJc~VaB+TDSKVSMvo|K@9~@ARzH(@npGZS3w3*#z12 z1l;r{bJN;{qFoJUk03 z%yjAY4VB`WCV3>aeKRag@ctC~S*>Vn?gy4-brvp(xmPYC zJ?Z<Vi5{IMRvpyQRm6a`@!7cFrTY{3XrZFyRvG35xVqYWp;+ z-pTswU>AC2iiF?fEG=*~Tw`d?6@Qr`mq-6B6lG10&Q1%FBDe>=+!kmWZ+#d-n`)njAsI?{mE%%Wf!!`2-NUU=Iw 
zRF5&O1a&e4qMd=h_6oQ^dQV@1`wC!9&x(-UIL^{~8*Q7SXP!@2FeSUuAz{QATwth4 zzZaG`+j!7DGl)-xNh?JyXhbr%hZN81uvS|$b+o>- zgBe5Xs`qpL#hYT1Ik#Du=uoxun3mGsWU(lLJn3e!su-&o8y&FS-#{C)UR{T1+`3j7 zx<5IrJ?wl>{Rr(+PxP+<@Xfg0`eTiAa0WZqn;%S9S0UI3`HvnhMncr{)W?2=^QJ$O zf3m(~efW5iGs@J$%dNjOOJJFRo|6L>#cg<^3hiNcA4lkhCF}bF6kjZa+OcYc<`Md0 z$*c(qTy7sw32;?$ZzPV1Mptm-@vq8+#WU=0=`2w8bu5VXWzT4Qp$)gPR6z4fGof4k zYbq|HUrC&@yIHxIlYZa;h=cyS^oS`~FdR0@U?P15WO>!aq)pxPxMvPMZL0c*g5Twa z>8Fk{@?feCd+Fh1oP+1evsM2*1#$!fwOK<3r^(c3Uq;N{_Ha>4pm-=;EyqjH40c6D zbTxS1j71L{Z(2Ih`39ghPkWHX4!jJbzQ2e_0Uy*QV5X<`gkY5R8&I2~%?2`D<89Wx zI6doPl_$;b68_Z@+$v*S==#m{K7>|hkXherIh?Da;r%!DO|^XLKk=obExnk?5$Wy;z5?Yk8roi%N#UVARmm`U( z{mu(dVA;GusZ_g8V{|pAj~mz~O|F%4|MZ)~tBub)a+2zcR~Ed=)@~v z{U{`sjimms?veb7+p3wA4;F6JFLG$?jx^qJEFMU=wT8&MGP>mcP8Kc!&w9D|=0P=v zh!S!$)Zn>%PsT~dmealy><;TDhw@?4mh};iU5wzQXc>wqxD5h*)y%tz#6-)sih(&+ zhu<8x`Ab^GST{%+rJb5FauW!l5h$!4^3=jt!dtoCo+8RxeBP^_@kZUF{3Y1fqsc;1 z0b;sP1tou0s`MR*<)mF`|4be)YW#0n9J^TYw2(Qs{Q{J;c&rlifzSi~P}jf5b4mN% zc%)g8v}X{QmEo*^{fIOGTe2f(yGHnQBWom2ZOXsu_K~>hEl$lheCBP_8Gxu)AH2rhUx+=|=!*HCKRi8$xP+2QvJABR3Z64YtPxTM$ zmfgMH`X0#GdL{9)i%nkK@bk5wxxG{L%WNI-_VWW|c8FyUfFGg+==6?Rp6@%qp2hk4 z7n~%HEvM|h0(2}~a8qJn9U`zOI*0SO;vB)cS=>Zy=H}xtd2omY|Kj4(tsX^B2#NP>#Ten4K#+4OBDE1AS%1cnQN|c6$Qm&BYdJ4=<93WP{7ko zHYdrVMxiZ+8+sfHq#|%ecmUX7@Bngfo=iTY+0IU zX1-F)13Y}Wu#H+ojpnGLgLq`tO3ViJ4~I}B<@rd~N$gG_O|KtP$eevdMAH!twedH# z7&fNvv3tdzl``Qkb9;%yINu97IrmLnl51GRzsYk+2vFg8N$+n`(R<-q(8Me7iWAVP zunZKKSB!o+i6rx96_CS)8huz=wRlH+7pEl@9Hw$eohf_UG8gv&0pgS#sHz)hxWKuR6Y>!s35`hpqMF#0dA)-=)uv zg8Fo`>d2O9I{d3Y(w7V)^rL2IdJv-j^O$G(RH7dK2CkMwAhDTk^k{dJCvFx~N?kN|EA} z;ts{#-L1G=a47EXP~4%o7k8K7ZpDHV+}+)!f8OuAcl|dr>trUAtaD~hj%<0JeNcs~ z(2aP(LWC2J=6|KIY`&`aRdDRZ%TV~+(W2L2%Bv%m0wa%#qPVy1;dj(~n?^WH#mH55 z#DfcaE#}jfE|)}hPLvWn)t@R|E}GomSGUx;T2;w1-KUSF3+67{hzc-|{y_=v>%$4H zK_I2k>}B$1$pAzfvmjs>U@b7$0+}lZV>w-8d^KfkO$KC;kWoIfjiCYB?nLi18$%*j zDU~Fzc2I*)lyX5t+7~Mzp88=r`V}|gyE>1%wf`q*O%J+CQ2BVcSyVrj7K58v-#^{ 
zIHi#V;s)SJf{;5nn?!DYc_3>{9ai8U)R#%Z`*7Z|X|vLXD>FD9(UiP#8n#r(0(Sui z6B-45OxF1&XtThE_mF$3b?D$+_t&sk8~0pm3B(}AsHxYcH10n#uWimwB72M#Z1sW* z`TPhHqo8nP&&n;@icF_PahFs>#v*uaU}9VrRFFF}wD@U<3q2qIBcf`6B0V?tJ)xZK zaQv7J8f<^wZ?@(RI=LOcHc&BwR!c6kn&0MPJ#M)El3f1|>)ckgLGKjVHq`5YKtXY5 zQ$NY5>=%S*IU?2Q%4^1F+U&qM$OsLuWUTge?-0+HiCq2XHaijb4)#S>>H^CC%YF#3 zM?}+(fY;igM5UbV^iD_FEd-`&Jx)h4{XA>j>5F1s*Wd;XHrQA^KP(B&xTM6`I6r-4 z2ZJv9&bY#LpQ*&C2N@*4x$_is>l%MG6=P;?K5ST{TLIXN)lUjDmD(Z3bn%Drvhn){ zm?n5dq)kVdo{;Cnuz{YO^YLdm+;920POL&x>FPC>+{TlRu+5GcD}nW`_fTer-PKF7 z>Uc)Tpy2o^bwZEv#x(_AAfU>A?8Jf>i)=|zC-53 zYN%**ir&uZXRPWRGd_8ktMWPY)NH%HXyvj_uoMpLRyOOiYg z%kPu7;zhy()M`hJ@!I$r9s#h&Y6S?5Xr&ZqTfsW>gaF3+jn#^VSGeV#Lou4T1nZ9* zoY2f^lLr=_0-H7sV+@R>wuD#{lA@0q736!R!c3gXBK1(rp9o7KQh7AK5d&f)UbK;P z0_Hd9F?y%#pTWjOYv%-}L+MlP`1aXv=Ff`}k~Oi(QY~n;OA{QMbuarQ7H$(3=R<7l zXgh$awlCfeHOez~TEYdXIrs7Mk(O4G=J=@QzJYEBG^yQseSC$#J{Pk=3YyB=g!kwb)nWx~?7+Y;kVwb91t=4fL-eiwALLM;xiTa0G!$n;5vXAPVW^JBy9+@)bn zaZ~0+w$wLzjWi~;o6QDJY`mDi)6g<=D=4vpnurZp-J=v=#b!gLw21Rohl2 zTW|Ty@$vidFAlv`TCCjpt%K`uTdc`7(r+gTHn=J~`9uVStGzeDgI2PWb)oRDK0Uhdx~7&ut! zpF^}-zBFxePcSn}zLFMexQG)4K2>gWOT4jrI*&k_?8Lahc&=pzGQZZ1QJ7?c@f5`) zxt&NaK|+$}n()hKoSM4)2Ay}sBG~PK!JVBhM$7T2Qh{fEoZm^{rZM`rb_1m$`e*-! 
z;RztiG1s7jYX8~dbdkn+1hjIK>Xe@aCKRKZMT10@wY;#qNwyhZyjl_bq!uISp&+qX zw?_O2g+aHdGQ$({IP?#ywK?i;Gqm&v>gWtR^s_vJG;Y+Bi<%`@kmqe!raDMEOv*%n zyT?>o)_bWo{eGJTv)|!Mv-GkHTk6T%r$)==aWy}-SeZ1#TX6bkTCX5{=X?IQ1J(&o zGg#5ZT~^Fh#JTBx!6oZ3K!c^xYDG-8;i4^p2HXzz%|9q*I&K(G(1K#+qXog2qo(!M zkQKPHu6nM9%za_<8Z%$EaPuCRr?D(NyaxXr8n-70!Ha@7jmV}M|B)5}xT~@QLGQD4 z@0h{ULGp95a5)|Z%|@_=@NY?G?tT@N9Jst=o-Y0f!FT+)bb(7j6>n@I+_#pYbc>i?uE%`_LV8i`tH_FZd)pr3M zk+`n!A#>%|nm!%6H<`P`So9T7etFJ|134v3J?6~4@xT6-N0WUR5fBUj9DK~?Z!q1v z8r^wut>#a1cFY(V&zgG4ijX;|;PjC@T!NZ5f~)9aQ-7cB{T{QkuPS-8RB*Wm3(Y~K z-y@ZGeT>$Vtk$@&P4cdE|50*A41Y~xX{B)hxffUH=J0;{1$~smE|al}4cO%qN6un} zgelUQvWooysEUl*WC9&L4y+`7(n|m3X}FyevlGG_2ZJ_+{kR18yf>YHP^@KzSMMd< zL@-;%vc8LGu4|aRz0XooiewIX%&bjl%iMEci9|lA&e1ACn`#QH_u{JzR3!EoC@jW# zWCc=5C8SjYIz8|aV##zi+!{jCJ^n64>hqp{@f&C_!Nk!7kNa=?he&5H?N}H0mb8@? zvZe8BQu$h{X&005h_?9Cc35`i#|VDwKwRVHr7IT8yE-RYZD;|iTkp{t73*7cJL-c; z2U_88Yh@DzUZ8!6(a>Exq(#fxXqB&p%t&G<{^Wf!+Q|6Q@_c@9L?IiZ2BzX=>FCGY z`cn3}6&QAA#qmvfVNnB(_X=|i8}mAsP}fxiIm#(niObR42{*5DT|#EQ5GcZ1fn7?v zF7^GqOV)bwb-vsxpCvrMC_Pom7u_7`X#7foRqsUu0OutbKdT zI=4DKq-9;H=(B35lf2@M>G%znrWz>M=(u#XR1`g|Ec6yeJ`ErKSL|wnI21jLkAgR? zBW3raKSNT4(#8vVnr*swCc3v3d@@3i+q+Aw`%VW!$(;_1QG?r$e!MchA(j~OIc3wp%76|*b*;y_#jYmX-@>d;;818P6*IcVV~ zQrHVM-*4T3s{8&&HsxGpZBJs!tM7+!5Wh)a98lS8HrQ6Oxne_E@YS4*k@4=AJK@#} zp~B4{%CsUMs$<14qMUp&%J_!ffgiZ5JP`I0jWkk+&D>j9AE)Au?ztN}m#G$Jo}g-h z0RqqARpRGgn^UzL%9V|c+U07e;07S8%_>hX3-GYLXT;%fzly`w1zRx5bAUnXGN>7q z%<9s#ryU;tSY*%&)`RfijhHgM9Ir#XP|3qydCs`1iIm9vOe){2wQnH#ubO!FFkD{b zjPG#3R5G(NYW#+Zwa19=qB`{-Mss|5hH=#X+=oo9WxDAO3|g2(Q70y;MiQ;r#U`w5 zm#mL^)>*#Dpp2Q)j+XIkjtn>SxV8?A2S?+Ab?373R0^ll$J^uxwSWWFqi8V)?8=%j z`Z-|KY6p!~Awl`|pNn4o28`nrE*3v7y7nWBPGXZ=BK8W<3SdC7%_{7=9HEJ^M0Ei! 
z?4!#$Yt;$ug1+)9*JPn#+t`V2eUc_uN$>a!ZaTMje&H!(J)wVdZAirp=i>eybmKDK z%_*txU%s~Gt3D{%+x!pe<{Y9cNuonR!+iP#i-7R?^Jn<~co-o*91Iw;udL|gqAE@h zg~`}CFd-kJC$Wo}B=)HW)z5qrSKj_E;&R2IW@K8>aGliuf3T`>!k?Jj&E!nDQ)>A8 z!y@TyJ&AZ<(6>a_{0NmD_l57~yt?DLxsaM~;HnBBCF3@&P?+p&Z~X98xsnTzeNbe@?Kk0Cn+ z^~&M#LF3(dp&)CR_o2*SgASF^YH@M@zN_z|G>Dj<1Du^E4f>yx|5Hpe+JU|C*Z=nm z|L=tbe+r4g4IFWhs{rJukSqUlBo6Xv+`;J(BJ*kSgtT!7r#&FzeX%_t?V@Y$t;g+R zbN{v0Lv7Tj{SQ*UTAwSOAAP>oF@2%d5j|b5sE+yz;?qY^H*-;SkBmO$oIWdriO0YA z67QbbbE@2D*fVt2BI*f8Dig>TAX~li5g1}|#IL{vFUMm^UAfHRW@VwCYpe^*@2Y=! zbamD=JbOv& zDZOX*Q#vtt$({ceLFOtQbj01~O0ZQS5*U4k<-Z6d&Y1NJH!RrZp6$Ci`qIgvE(kF2 z?hF8GB~i3#b79OABVKP88mAC~csG%B0{1TM}ket@s-sTBYQc#=Ir8F--^UMlRJ``a>w$W$X z21T843JceXD2l9n>A~_hvK>8RRQylNQf2n+0dk?^3&y^PNuse@NR{^gQ>7!@owq?< z2SR&y3@4v1>b+X4r2`SEgL}q?A2o52>l~3m-#@4mqHi6Brbmg++9R3qbx2Ph1ChtA z`}8DKJj+j9aU>0=cWu-!(ceC=`3THK!>IH=qE=%cT^N*K&H4UjTwH3`+%7~?XjI(+ z;vW(hJ)Ad$x~arD^ea+Aq+w#?r2kdqLWAGXsPds_jEH(Ro${vHl#juO$teBW`a9EYvni~pg`=dv-1gxmGjU(>_jQNT##aXQ zbr8gN;S~yU4jX6=cyuTn`3E)so-9RmM&{E1q_or4vBJAhAm2LIwJ#oY`Z5$bZ5%Y$ zWeOY`1%1VDP`yOK?^98|k6UJDm2 z^o!T^EzJA`Ux4XKX&ke*99|)^r|~D$9g5H{3v-SN2nDq>R*NR*v}_x0Wy_tcq*H-T z6^g)I(0J|c)v5;_o>~LSp5IWoK!6%1@YvCg9f5DpI!ss$N%F^OvZ@b0UD!x$M-m}4 zV`RO~k{5H9Yqe>>0uEu+M78;pHlWygDfheWMFUg2q9Tzu6V=g4e*mm>1BaSVtUxwP zkG&iYCunl$fA4h_&BW6xv8blltZo%a=1Iki6jViTgC{-o2nzmuU@jN{U_^JPI=I&TsQvxG0C%JsBWEiTCz1Yc{-y;wF4TI3R~ z8cPyU6e!3yMzu1>7~+N*&(u4VvHH@O6ll$u#X9*Dm8Oq}E3|wT2}_NMuFtx#$GU&& zI~PqqfZ(7JFdeN+cUonqNDeoG2 zf6~jEVwktfX36i92aXPi0|5VchpGIrGFDz(7fw2g$HZOsQ#JLSgGy|Ia7DoiGn&x~ zUyAV?w7vIlbqka(C^op z9}|+jop}64vNf04AESR$RLZU+q==%@SY@l}oE5v*QrMhPzgFHlaI8j|R$!EjP|C%E zYojK=1k!9DRdH;&rIeW-XrIgv;gPF{J^+R_8m3MARL)|*?Ozz7xUCMA?-HzuJ|(^W z?*M}$xWW?CK%pgBVf9>7Ei4hTd;MSo(=~CK$R?$E7Lbm#@MQTah#$pL?jI zkr(5Fu)1jd428JfDsPF{LHV2=*F^hm#Bs2NCAH(-AMd8&S06gTe^3?nILL!BhU@Hs z>bX%_ZQ>)pg?~QrIC>RWYaL|FhPCb}RXw|CkB;rbrE$cFnUQSYXVe>UE=|sj=w&6% zx>L1aZg^yOA8lEAVKCS`@hA0!+jtKe3vT1!qm&)3T5t#D>Ek!8DJ6bBurI!ZAsx<% 
zz*IMW5`A^ht|hW@r?Yi^ms6J=&}k(MK!2^(JVfP2(nHu-2$Clp3u_I7PV65-1kZTT zVhl;ZW)lNj9n^gwcjPn-hzoLVNpxpWl|RK(I`<2zUHJz^!x0+Tc0P^5&b;0xf=dj$ zA~dG3=1#39^k}s-55|u(RoFGAE6fDQxqQz%`!031!1luxm)WXKWN%N3z#`lOcCmis z2idEbP&R*Ym%xj3&5G4^u9Gi!p3!fp|D0PlCuu3z>gRF)ZuLZ0o1}~VCJcp z$h+Fnfk9rXkl)+S`-06a`nQnB!kJ^rQ-aTVfx_FU3XUwPKZrS3MUU%3nkb6SkEv(j z$0b1*HVPAectifjLrD)VMx(U_%!8_gO~9Dj>&>)-VL}q$JJq*`c_YtZd15*`)zWAe z-B)_G3LaSp)-gKm^;*5ctbkkc$0sr38aer()n~fgL43O&k+W+>2F6m9!R2P=iMAh{ ztf)UtsIS)b8yVUrJ<#i2#7Z!;e@h3M9-o-N=|+EDbaG4wDr@e+T2jIeg2r;*RO_o z`+Zkn*H{ie$4ae^(q+}^r<+b?<|0t4+4C`4^_A7!OrVLd<)A_Xm4~1@*qYNuD2Jvl zw&K+n5e6fUW{p2mb&36;)}etk?jMwK&SOrOq%Ldv`jOOj@n5e8bsn;lWu{nP;bry) z|1nlBC)awsZ9Ge-LO>VQMWuXOD2|O0=MOLYK6?GZYlBL(c{P&JY?wZaJ?l_5};TrdPe=G}k7To`?oZF{L#hoe+(PeH(xzMWKK5ma_^d2sXz zf4xCXZ>;#t^Y@-UC?V=mFN?r-jIyU7oDcPI*(B)Tl0Zw~%+A$U3mQ`Ot-4G<`Pu)02f&{ zD>60*i|I)<&2)~-9x;2E*g_x zvDCCD_)r?_B;Ur%<~C3X^~dx9O>Vz&o*a>4OPsLA0RCD}5wE1;db?au{ch6iS^aMH zT>Oi?qt(ktZO-R^lOS{(#SHaISTs>fIZ<*2=qLraGhBmRron~O2~Hd78<7Wa0g|R5jMVw%4OX9q^n?8IqM&9X!c+0U9@0W zVx=)E_p-EG?8~&cUrSFLX9}T#aX>p?bf{RWNXgb z(U;=fi2~h?&5vY!KU2lUb-@}hpNH1$1?Ay6N26?ZSoQel?{PHvP52fzw)I?DjuU^J z8ZzD(slp_8t@}>`R8gFgKydqJzhlQq56(U_sGyAEn5d4Bn`~pBZl5fYw7V+`UwX3& zAQHZl6Z=zrL*Z*Zj*UmMVNM?F2P~ouqV#`TRsV4mZVt>;>!mJC-IuPzuBn9$-OV0q zM6Q+!vmL{Hl}m=K+@&_9l;oU|kU-NeMkNbB`Pc~(4tlMogz;`dK?}dWbfmmcQ$O=-=uc9pJg#`m>%l6~$07+&aogE&L6fhBNCU$Jr=v zL6xVkQgZn@TgWE8^f!;l=|*&A*9HFpQ~5mUOH_J|5K*_JiJ5G2Z z9O4;hwd{i>Z_y!RwA#pw46bW=%c{ku>};lZe$m%Z z{i7&17X#+R^VcNw1R1&LO~YAB_J7hNYCr=xx=x%b%xw)f4;dm)Y`zW1 zVDr*AyD5cR7{u(^i`@#!c=UDBp#<=A8d@X?8X(M-SBFh==Phnzw4;=~i5x&C z*}UI;QG-ZEVo)ZY6G^XSUhZ>Wi;IgmXL>J`iU^c2CVtAgaj(WL)JG@vGQ`nIW1{81(HhU=F}Z@Pc4fe(ahGS6{0UL^ zW~Nc*2zuwvC60VV$i>`;lDMnsARt*gD0j?FRR7RL6Ps3g)Um8Rc_9CI`w2|0INPc`!|$L z*GW7~mf}W*)s=8wnUsRPu8feB2ft@1@PBf=RFm=O%I+C6ru;XWT{*rRjN+BXtniE;kHcXU!eokJ^^ zEwsS4hNbGz#qLm3;#NqP5XYgdeEWOC!Kf->uV^z$!4k$Lo66HvEzLO)1${TEMhCd! 
zDV&_OPeRp#ixzOzTJ)}@{uU!qOFccn5XjeicB*;X7|Q|WTGhuPLPnI0kIXs1P(7NBuUbR zrQ`MDh!yL2G6wU{S&220o z{EiJ_sN;!7wojiWWH*~LG#Lwq=^mRGJ=ez$T81e=(EA5P=UzhY0wT6g+8`vG@f8m$ zl)Ut>O6ZKE!)J+I;fkr|AN8DBcMtcRdR@o%QJR)0qL0lcCHD%M!tyL6)rQSS) zVx#h8$WZi5c3CPV+tJv5R`XvDGgr};6?kD-=R^52HtpR%#za#uBeakG&Ch7mtzx&% zv@<@Nk%=r20f<0#?dU_$2~=t;f2}b&uBly&ZjH`CKU6Q>1?G{UkND$KxgGx#b)LH) z&a}HO4_*Cbc@Ww+A}ui3Gy6v2kWfd$OxO%`V=niQyH=&dliTBg!bJ}?Wd%biTZ#V# zZxm$1mDhQRkf{MHSUlLz(I$o8+k&Z|StsZSG}r{0F+kh}b=uZ*gpvsdn_(X7-c?Lz zDHgn4lE$@CIU+#q6G>}Lfp7S}hk9bzK6G}JdT0y!J?U5s&K*80g(UOX9jIPS6rlNp zwvpCmA!{jY+wv83%~m&d@ORJhy~{x%aXR`rJ0(DwSD(h z&pG!eM%CJaX!pTCcTUY(6=gn5Y&bQ;lb!vkofv&KaQso=y=0|*#DFkHVfw;5np*|W zoSSMD2<;FmfiZ#`HBli(I}a^fGdoiT4UEj>sf{|K1V$%?47j?bHpq+3uHLMKcnhj~lEyhezgMJw5uxL^kgS2*x^6Su>(SJo zDNq!*WQZc`Gu+TcBg~ir^s!R_qr&V1atLeGwTew!%Zv5r*PmoFRkXsaw>K+#5})Ke z#Og!5nG_kCs=Y0e>-$eq?xd`+xkY3GkjFw!c*l?&5tY-dlXg$m?$ol1zgY&iB zH{*j5u$GzBs{Y;&-UEjJj9=Hk#~UgR7~;OwvDPfPK+=hE4p;fzHIUUeWZm6)k-P)X ztI8?9Alq5gb!QdGU{{p%^CWZU4?A*XvtdQNuc$C8GBwk4yGsS!bjfZsW|q^y^zeZx z!w9v^TSr2v#CXev*3=Se<*TC7@-)9tLR?_aC!my$o8!Z`xRZePrJB>~@CeO89xQf2 zN%3<2dtLa%hIyecc^#bxz#1W+O5i|)nRXK~u15a(OKOJzw#R7$Rfap`x!6-$RSEh* zMPK$Gjr%8k@g;3D{;d5^dDI2x(Tgf^b$PC_C&0PH{t9 zohy97rI!={kd}uf0rj`B{8ba9$?~Pa<01wYL}bToVFYEe;7Shd=f%w~gK>3T zZ!nm)sJ^I${%5sEcF^GK0&fob79rmvzuY=z6*sL7Ds%=Pfx2z|gIed*|H!Dpul!4_ z(ZL#W+l)!qCbj2UR{%vHcz}N4=GR=?(e}Z{z%BE>)FH2pT9K!q?X{{nb@}+#%VI}K z0AHMW22owLVmd)R{VYtVWJ_mgt-J?Qh~vk&%0Sl-S2x*K7oJKU;n-<+_o5qdW-nq3@zhb8tbFa9@=5J6`HAT|`BL=u1 zsJG~a<$g4ZXjgXNV1=9Y8hSFICw!i9qix*;sBED{E zalWT4`iLed=npW1kV>hh=ojYjjRCp4rffNWUC?XuU|f@n9+I?vHq@Bm$F7d*-$V!2 z3nbZ;hNS|7`amO(B}ROQ_oA0zr#lk$y;V+cVB^4w^iaQpGxi_%5A z`bT18>6X?_Cs@5xb}VgPODb(Xz-*@YK6lxb*3aTWAJI_fJ*}e@!pvzl{XgZ!@_)zG zQVDOH%Ya~ff52}b9ma}3b3=z^M>lp0mB+rLb2uUVA+Bbg=@YL{>)UvNfR;F9xjKDo za0z+4fcRSt_Ip&10`?l>P-m*;l~fmevQ~6B*RN_LsNrW)FRj6rHr**4yyD?RMzxA5 z{-ZrXizP3LYw9zV#BzFwfi`m->s^)eH@_|lisx&)npNUYZEH1efSHRVp9)a8vD|^X zedboVIiiHSo+jd14(@}83w8jl-_u8*$=4`r*5|{xIC74w?pskFLyRx7Gr<*?88PJK 
z&owqsrHY;@1Sz55C4wY{)p=#iuoI|nt89+``!)$emUHWAXd+HC-^UjDR`JC)iAQJuxCLOQF!?mOD*m2X9hK8;@M;F{L6sT`O3hv|2Y6S2|CI3Qy+vKJ&|B z*Z82vKXMz)o?S-<&y}0pLp}$!z6+OGJ6*Q|THO#w-NZG-K^OHn|EASly3=(pjCE=C zRGG;> z7*>?9sstp;e>Pp3=1cR&R}`9w+VoJ=dtp^8ByGyqBrUIE9$=T-8H}dX^v`Y8nff!_ zr`92kdtPhVlMPw(&O=6ONzturxAH3_=yIu^q~w-`oGEOBwmRp8WZIu33xX3>FZNte zYxWBFL=0ZIl8#2;v%AFuBnH8KV~{FzAR;Y1A(bq zIml_PQC?N-B_g}c_@M^*9;IY?HJDr+WCtBi^-6-*ql($5=J&GKh z@`P5S?qJsm4!MROAP)XjNil){pu3Aag+{0re%MggDDtTRl7xDF%|(jZn=`v?dQt|Gz<+3-eYVI#9APU^&%zFl~i z0_-5hc!q=APQ5rOmKzq_ys67)G3&vlYE5f&+CoY68XIKVCR@Nvu}p>gfLJgRxNAXw z3#Za0vt7Y+P2;$&E3w9+dBONDLnXcqEDEG8D|y3Q1L_g&#Q+DY^m#ftI|3Kz22D;n zJ<6uas%n5bf4?pag!vOiu1hhSh0|`-Ffh{Nl8x*@8|T4D-G|ovRt~qmQoBa_8Pd07 zpQUcQ;LH*}=#rF(>JEG;voEtvAc*=7l7#~h$*%>tJ}Orlr12mOX>Sr31s`N68<(8% zh_5ys6vsGWx`0c7;i`WnyDy$C?byzE&IN6>kUGFX>PJIq|BQ`*iiHb(40_GNH!~X- zhn)pnV(P2*0Ckv9Ya<7{)M{CKeav;YSs2OwQ8+cH?*3Z2W56s=XvNc(=D-TV?cFhN z)@r?Xi%`Y{dN%&Uy#4y|+Ne{Q*=##iEytprIgJp)e90R%S#1458267;ur}c$YQ0-z zeD{47INSjq5&Q@76nWK_QJ3RqFxrsWM?WJAzKYc`*GTMUL5TtM&%T@}I;}(c8$Yce zc2u|8ejM6LQ5v#>T%ac*Rjy-=_D?y6S1qr@n;rExc=%a{DF=)@kmWAd)cN+02=?hE~RA69T;?;&U2`>T5`7 zwNQ!1CSgr|m+Uni>2>m|@(I!S=fVg*ZOzJSJ-ry8i#%!Gi4YG~|7(i>M)Ozu z{!|E?2M15W-8t#-?(cS?I;C!)+vdEf-2OS6!s?%vb*DCYv(?MD;+$ zz;+lS80&@DNtziL$A6$Gvv1VWh>(xb`bX>Jiyx={q3eAtRL3y+PSN%ErE$A8SwPy7 zw!}10Gu%--XVy8iMNxKk0|^V8tCH(dbgdvOLpi%B0sz#14yo4ScOvN|FSgYsAI#c5Rg%ky0 zso_oQUZ|eMrf+yUZESz1U^;-9I^_vsopeW^Eh1HnQ|9Kz30)9k zf7f{*_XeiI8A4W4y1yk}J}X8qYajVpW9fZf^$0rT)kG9CuAyS3N8f6PX&+Z_Wdt-1 zVe$=Ze~hkxtWZ#bG=A+gCgvDzx7O;&Oq|tfK05>vAU3`w#r8=y2PWw3@ZZ4?JygPM z)t?2NG?Iz2r8QoE!a4)2{)5u= z{{+m$Ml@Pwb7%M#+MIKWI+RvQ$y;;ODr&er>Y&0F#uxmg2?a;E>pUQp0q??~Qq76m zYjR>WZyFB!`+`n^oqz7n%|q96Mkt4u=p_QaV#&b;HK)Jg?lNgv`RK;x*e`cHeo8#5 zRjhPz&q_kduVe%J@nP9=Hp&Jmmhpy~mB*ZbQ^?~=V0uPR3(vZYp9@s(KJt6Vn z*hYglp&k#U(9r+MDyQv2fb_mV2WE#YjvVYKrMZd08UlW z-)ehVc<5I~t?t}_i!OafPayiFwMR6Kg1xw}Y_r|h;9tHNjg$1R5d!b_gKNYk*ZU>loRinFsUD-)kb<-~1$J5TXlq1*4F!fuqd6;Rn 
z#^qW^Y=9Qz6M9y!X3N=3yR?q-t_~BFuDRflA&$FDM$As|>H9(En@UtK4Ih73asN^L zF()H0DIsT$b6x*Jk7`$ZJ;c1pMho;39lTmbU8!q6A^H;rlRSDMjRAe|GCYFlD_-_&-TbVZIvpb&WPc0%85!JXaA>ObGxE;b*ek#e|PRk{y>Xnt=kTh@#BNd$q z&w)QboQkWX6Qjcbl*{N6c{Gc|?=$(0Pv_f3sValubL~uN?X?nY#tf#YajAEz?ERF5 zu|op3p_6rJR2m$J)hG21DTe(Uzqe&`&HAaqfXBrz8|R6Vxzay!J`WuVzjyPOkcpzT zSsod!g-`(#Wt*Jd^@EJEF~V6detzKBvUn-YnsUHypN_s;MXamIgk_|3mV2Y}VJ@jH zHgPcFDXhnBR_5mVCnG5l+u1LPGdn!?ij_QsXIDBl{OG6hpUvvhbsmEu>7X#kkoT=q zobqc4u#-6>f&>x8JtVs|3 z9K)sBq@b)UNAAkgT9(n&-KTep3@ z&uBx~`T#`KwVsza=5~hlG}ts}3hQ6co_gp0@hABO@FnL=<|B92W^?p9RUX)tvqVE? zYJXo8>@OwO-SSR`T3^7G!9jnOzN19EWBxhJrZE3?aDZN})Fi zJJ5V%9|wojT!+WwHxv=v<3EvVlrk1L;nhTmc;nsenX<-1^FAeB#gA9x^{L) z45nWaL4qJ7ayoY-h(?pchxUSLA~$x=Hv4oXO1Z5Ncc&+gM(sY59nS$^O-8yGMZ%~y zu*z}&!O0^LH=-p!nu>;mP=g890-BJV zk$a_%HME5ELf?{w`JJRoHdW;i>UqlF%9>yctXA4+ZA9c%kYLkzHx=wPYK6$#fPikRQ)I(tu+ zyt7tiE;IVX<)BmunhU$vSBJOJplB2q@mgSuWTmvoL)1$xmg$nNqC)CHs@iHh=)hu` zGffwn%f_`g#(z@8w_|EI@{nWyyCNKPs{dmr&<}F~pRbFdrznRMdF+%ohvkWn(_413 zeNd`lnLv}d&>kGS`F6*6X4uCEH;)6@X$?@}3!oJx z5jmo7^@c@(nW-i>l~IIHZqDR?babDDT)r}C`9T^1-A1v`GqMu%&NrBGSr zxNJ7t_%SY{^FbLyJL8}4R71(Rj1*())~gj`Ll6Sih)<-?iwecRO}D5cce7?zf0FQh zoPF~djD$Ag&&aH@QaNHW?+7A#UQqcAhq!brxAMJ#@dhbSS3B(jxTrmX8N#vX_YdmX z@&DEh1;v7kug8?RMY%0G0Sz_`*3dKxTJ&y<9RLWqMW))8zQvRu=J)9IOgTq2ap5+O zS=u7sSfz(m=E3n)lt-xivKarts!WW}J!?4zC73DlbszU!1Dz^A;HLNguj1R_xxll6 z%)4va@xw!|)eB_LhCm~&9!Sbl8~mL3HtpxmdQ^KlcKmQo{8~=jgmOwokNgWrYG1Bd zK2r(ExEb&;6nqwzg}*B%hLiDsJP9a$S5(q{$yI%q;Jgs{<`pW`1O-tAfK?9 zD{%-o)-^|FKF122V->A4pCbd#ky*&GOcFvrNIfvjus6$_Q2qxMB~Y0@3Hdt=VfBSG zm`vy)M7&9;Rh`Igi0BEgOcfg!sji;$T_2`tG`joA;*ezWaATJZPTTS3I_FyF0{(YP z&Qol@-p1)1UHSeped*&W`zV zrCf#TQv(W>u9DvF8aQ){u`bW!-fl=y9@QafInm$)i2gxE?>t$0qQUfL{<*#ac04VH z)jDHjszUVStwSp@68Y!AFQq51e+Xsadtcc6@`rb6_;B8?bW2A+N8LBT(f zNM9oqL>+&j%#>_%zI6jcGUAD z!02gl^U&h?+0_J?L@w>+o1iq4J9njCW<+Ojx+UIwWx50EWMl~u_!92SE{5zOa&2Df zGAMR2OKIoucU+o5o4%$lh3HCH7-lUj?Gt8KBEBcnu3GO07c~jY>ovpjueXn$oW8V6 
z_$$cVfGs5O4=SG(m0(EINO`7}BaTQR5zPxcI?yDc*^Y6}!qUvh**f{D4Odx0@b}|= zmWTnyuaL%}V)S$BPGVysPYOFw(1)sSb&Zx!$a4uNv(z6ive zUxk>%3^3kY1xlGx4b6){Ky^v zulKnYhO;A=1)tj7qT|g)5BF@Ke9uc3qe^FfFRGq(>6O@JObG4bd-a{?%p=xrVb#FX z9jv5(cu008!R1>nbZ+ji4`<>;v(&8U!mYPPB=?hL+h_j*4|tW2-O6zjf@;fM;qcOE z%z=xCh&tr^a?`?kA|m7D(vni%6oF5TCkN16bc|py!j?p&C@o`8udkl zRJ)h&lrYBzFWO3eIbOR0uT7L;Z+8Ez_;`ih^;2B1&~C&jmbP-$hoHcg^PJ}VMmbFxzBF_TQ-$Xoq_{TOC;I00>HbF*uVqcm!)*McFor_zP?zd z1*W9(Cx{UiF;Gt1+pneMiyUXlrkd$yE_`2H?VMp1-9C^zLT1otlye(uw65YBT@Z^7JVySiAhZSE8byWd|&%X;Oc|=@Bc^I zTL;AveEXxju)yMr24~UW?z%{Dch}&-g1c+5;DkVM*FXsFE(z{VAZUVw;K^@t->>T4 z@2ywwpI19Y&CZ-YedKeR+37xgzEDsFN_yR?7N>@-WRf}fj6dh$TXx#MN{s4n3czSMEizE!3J2{MyTum$l$rC#^}d1?M-e9^JF zAw&?AwJw|h)8v~sSi=!BF{=WT)CBZ840HB+D*4WD@)Vx)Cy0?GZE~Y)@8nK#CUAzb z_czm~SNmnTEh%qAjhohf)aY=TL<$;hWaQP)UpVs{~EwS@oYk7{FabG3A?YzbEITe$Cni7u~er z1oROt>Q-OQnuUsmq_cLAF%Tu2XtMV*JD&)e4ZnVe7Gbd9TWp+F$!anD0E>c);(##Z=QUy zH2!>!+YUwZgiHeKkvTz%}lTXeS-xbAo7IuF7NPX_UIVc}RvKN38BUK0) z{~aiZZotPxxjSj$JjVH{>1-S(LKY`ix*Xll4X*$Ovh4!st52M8J$Bm^*@_l-FFeC& z4%LVDFDZDevPmPZg&JzZ!PeD})fC#(xqTu955Uky-L}8;qZ`JRB=`JivK!kg(}p}B|Qpe9_9NDNpVVx}BDA~`6;@F@1eJ?6IdO;>9wL7@9^%loma64Y=U59=l ze(Q-mm|xxDQ!Ibur4x5H(kruI7P|qk@;CPn-iDeVx0^*hv+_*5zuiW*#BkYvx~x#O zA^&4Rl!mfSLjKkm&*tUjthod5V636<3Bw|^tDzlOzKL7W_*_T2Xu0sH(nlz`lhpjI z>Pz^0)jbvkw&_pH7c<`qGtmc^3c6e+2T|#ebSlKiZ*f6eNi;jB#r%xT+|XJ)5oh%8 z1mipo{QmF0o$$<}WJWtx4)7fh@E8x>7>ME*TR7KTa1fvr)=IXXnaYV$9F>q>2qubA zq$ptQ74*HsC3odV;x%>mH1}osq@wY)BS!n0{V%A%M1~g|>$aP*q;WsE40gg1!EX-y z6B9kZG3R6P;7AZ%RJ&J6naye@UR-rD@wD?QTboi~P5HQ6XLnKgj2miAXOM~Cm|FhQYN^D z?e$&TF1<}EKq4h`+4#H9yr6GrBcVOY@t2uOyiZ|Q(?a~wxipq4-s(-2GX0B{ECY0&iE-+yqG%466*?)B=H?(M+z#9dR~K26^`vhorrsj zc*4fqC=OHOS!`Le$oA=IdG{`h>5p|T`uHZVD?r8FrUe1)evBTzf|lo3sLjQ(Q7;H2 zOZL0b?cKNKmyQQ0FFTQWN|nM%tP{2EL%_`;F_MZo#q1_jqz1$V(;h4Zbcwf8(=WS9866VB+dnNhDJp7vh1&9dw#9b((UAhyDb zAd5K&mxwt3l5Zz@;g7xM;6p#Qh7&v8Uc+Qs&FY@7;N7A70AA>bdh_I*($hYFp?)!Kw zT%H%UzFVy|mHL(J#KksTZ-2~5&lPx8KfY^K;ky}b+k~W1i=$iJ{b$BVUKqE`3>H2j 
za!4E1NN@SBg(@Y1=4{Ss0|Lq{PZmv)6Sv3FuMr9$)pq!I><5mFd$WC zBBl6UIf>YEaD1&4KTibR;l@Os(bn>gFKO80UZ%LKD{l4Kjm-<|UOHT~-VtpbYEr~4S(t>#Iifo3eC{Ov9dDFn zW-F~}cAluwQq*NMS0u}zJ&kx(750qF-U8<8P@=O-Le`N^E3{cVP>Q*36r_HB~U zdotvs?2G~8`#;1`pLeC|+hRQzZ82V#BlmA#c9cJRP_H~d8%1Gr4=g?LM~o>|)CP80 zeR`iKNS>4F5$-oV((=*Ou6821?u9*{cPn3AX`A_IA4;PZn#-MEhBYSf9^t#8;0AO$ zeQRlGf~~c8@~@2Q6P%s}DG50c7?d^?TL!+LjGAF*>a0#Ig9(i6 zb83t{|A^Ib7*@?#o?Ve&;M$`__HAAFAiT%XQr{sqrCjK@&3q}!(FIQkX0eGgyOZf# zmYD>lUm3EOvfHoUEn_>(29;A`8SR=F&T5I2q9;v`PX0*&L3Gs)Tl(LtQ1cFn3B=*h zaoK8|WA*gfP4KHmsBdG;FyjiRX`>9=eC7u`q%Ds1U^I612!i@(gOIm2twf+gVr5>? z?Ui7UYM6rB1sedP%MLY~NReoEKX@8V?j)5s-LD)Dq`RBt0mQI>Hc3|NatL(?&EYke+DW5qEt*70@>;kx={uWcXQ5=0 z&m5HqD|qTGNSvn<_{IlB2Ni|-Dg9A&jN~xH5P7A#h~{AFN8_eP*m&?R7VNc~tf>9g zGR@42iK6;f2Ddjs{G!eV8F#EXe8#uZITV&46wVO)cD{-a+i~9F%73}4i>ll>%*&ovb~b!Wiu{E#31yPrmW_c56{Cfh&% z6&+FfnO~8{0^|Bmss3wYvxWlWr~R&!;+nI8z84{AygB*R*Rx0~A75QFEc)iXsIP%D z%hfR`7t|lzK2bFq7MRvl)xB?R%+X=9F3zrKQ=rkdQ!eo_!CIS;m~GLtmlb*WT6M$d zOANzzTfTG~{*dQqSm6D~$!4ia%hs> zp48Fgv~rz`IL)iZB0Hb?OqYoaZdv4b#cu4vX74Jo4W2){w&W*MN)(96DOt;B5L(K29(#yKam|=RwI99W6*TZNHxn7= z)}B*WFv8^3F5lP}*UkB?!$XXMZ91_ow5A8_==qF{ePW-?$et#q;(dK*M^ns!9+3cH z(gMe}>MCWPOw5lfe*6loR>F4~^@{CO47y4S4lD)~RQ=89!U8 zLZ2qkSzH{-WtJoF8&JG76n%9L4!WpT<;YQ`63<@9Ut;xOQINHeyZzC`T(@Y)yvQHZ6B>s2$=^wvW}pn2)r_GAp+FL|0&T^+a3w{&EUIrfSfniw-t{4U1%=MCdIPF=9kgLKB2^b$euVa10Ms*ipTE(*?AOY2k?nS{jd#uwO#Z#=5A zVK)94Uwzxa`RXAG!Ziy`_7|N?K8iY&2MPLWWarp{B1L8i*aE7W7x2%I;GUo&+T*Fm z;G<}3kB?SlB<>ib+W2iPJ?p+>BMYRjx>gmA`XiTk!(b~)+rOjebkbREJ71XUWh?PD z1r;5U^c|jIyIy(aROo%&X^fD_lj`vU~e0LW|5KiEY)-k-hQYy1Pk-D?F;H!1VIK#mj=ftTcDHHk@|i z{bhGdH*7RJNRkXNN~xZrm+)HZ(dhY$9=bf1pJ2BYBKx zB=Nl^aoF)z*EXr|^}_Cc?k`45poC z&8KEgV*S)9@*x8P_%`1`SRrvCGY5bT`VbL4V} z)7DNqc$34@+y1#m|kPYT`~V>+_AD@=5&i-32*8-^7pdSuoW-8SpH?( zsyW@+ul8RVj)7%u7I@SPTiJA)8ofdbI8hr}NinM^8ip@L^xnQ39-R<=J-%V9HT=D{ zk`rF?iOsH@+g0zQ;jVnJf1k~p1I#LXi2i3@y1Wme;TkNcn}kL zKe`Ug4Wn=>v&Bu@Vpn|1N=AU!2X3>pwGYfPINDi577cwpoN=|y6?3D!I<6xA&-(noAtA52!C` 
zHy4ywh8*I7-zMSf60b}rUSd)-aw{FhWBlwqX`m#HQ^daU4Y!_QORT~Xkz|@muGHin z-;?1I++n+XC%#e7X(ClyLa2vF)*jiQh{_ejd&RCVYghO22XY&wWy*f7xe8<7Ylnw; zPM30b;_dEv(8tg7SueOJ&q!;miR3D17sMGnT~nr#ez^_mFrVzi#FRgB4sVjD*kS!r z1wSxv{CaXi@w3t}30y7M-Vp=cj3ZNM3}PQje0jYdAxpF{lPz;gZmvmcY8= zkz@#sP4G-mGj$*!I2*EzaNg4#lrW#4+gZeZ`1KdGkdaM8IRVGerZZQ^pE2^A`l_7Z z|NP*gJco%Zu<<(m-U|HrnTHw$?tOGhwWeDl)`yTu2sG; z=5vJZt7gLKEniV{w;tuqmp4ulw6(DGow!RCYl!7w@k7NnlLgDxDt9owP&%d4+&&i5 zO9q)jZ{}NDjFzM%+~@RW9ZW%z_Gmbw5+kr6?Qs5~JWFRUTlxCg1p#o|LN{c72pL*Pg@_sKz32*O;E6DcyO>t}X z;Af~|m8v#*yR@crUXoeA3NqEA@4LV77^TWM6H&AsY$aUf&Wwq*v=U89O9I@b|^3qB0$NQ3Ri=bm=ebhD6kJQ2@--{V_+c>*h zRjGQ~`VA{gRD->@Z>Z-n<|1%W`NN!h`|{2TVghwA@>_TDQF>vf1G@~bRDf-Ie)Zd0 zfE)KbG_5w5--(IXr#^B5L)Y&QXq%58MqciB*NTl?a9T~^Fj9+2xYf+wyH0%d`+jG= za%aUIgge8{y38z(SoKLpD;dH3&RvSK+)oqwXD1^<6^mpAPEs~B#PXx!xx4{~c9+-J zbdCkHsNhDH`NLFy%hs06mQULIT>A5~2E;K~3njdKd-QGP+kZ&wEdGMjQuquUWz0M) z_UUT1=fe^*Qgk1uP`+&%fBa2a+CCMmer(u2{K8*25wtONMUb)1`X@=d%r-9@6 z*vZ7+PghF(jssRQyYbiLXKkVt&}1giT3h{=a1oEAA4C6!4>fP7pl5*G#dnLfE9}xA z)Dsftawr=C9#^HT`Qkf(0X9$}|G zKRs_+yb-NIbkTNe%NYG+w)%j0mHtHzC`kVn=LH{Q#!okGjxVfFp?997tJfoSBwnU@g4UHB` z^wRFxry<pbnt3R#K9&z2T$N-JFQ{Gu zwy}*S3|7xjcMrMYB6EHw8GEi;`}~JZ?v^vRTs^wee8wU{D#s8Wrbr&42!Z4@d71Q4 zr?zUjN}whpYPr6<(NXauYp2(ea1z5GF=Z3e{wGjSG)51aUkIHiC z6Y8vtNH_Ke;GV_mwx8TwgP5;R&Nvd~+KmtBNmL2HNNWC~N@zXyVh+CTq64aFhC~ad zGjD_)V=)GmrD1GwC3U3Y8cS)9dSx7<>F;O1apY+0Cb{y*!j)aeK()U9s+eWsS^UQ; z{eBQzJuF%KT2Qn|y2#wdy8nG za=0F&&g=0EkKg%elJE?9r~8_ScGFa^qJ|G)?M*{BE)BN$69bu5hP>R^l}ExMOJ>>*WiHxu=$- zMP#jR(sy#7BOLflGnyzLey6Wd)V`iH(i<1RyUlF%{PDr>_>^kqVy#aT(R+k^DI0y? 
zsX`aDU)fBheCZ!o{%Si|eg1X&>%IAGH?f6)4>=uQww8`1(k!zd{KlWSXw8zsu4bQ4 znaa_&Iu)HIkDzCL&E*t=pH)t4qx3|v61`R$0}L&xJ^G1QotXcw1h)Md`3t{%QqsI!qR*(g545OAAj^wwBB4foR@dx{0s zGQTW+0YOL|t7wN`;8jN2&-W+ze?g1lr4tu{e0_rHCwdnq<~tz+D-1&qYx#menbgaE zej6p_kJ5kKGjB7cdx{UtW`axQQrSqAD0vBu5J75P;86w9@+)lgs z$zYn2o>UV$N*$@FR#@ljKAV$()Y~Y_D~Cfl-7EI-tbFDMss69!H&{ewm_hqrrD*Qc z2gz&{#_7uWE_XSWji<7fOxn5i6Na|F8%@+%Z#2(z$Z+8hUN73IcJ~xNTukyi9`K7OT9AG{P&n}WAmj2W9!k_)f0`jRPhJKM&i9UrR})pacLW7`mbZ^ zOXfow6zPwi&pku^qI>O~;nqxvB*+GozDm|sH&oV;!&=j8826`A@!@(?;*9VN+;d2D zMWzrlH|G2vbeGWl*>}8o_lS*_@=#~95VPX4s+(%BaLq7rcA(&rj=(17iXAj-()fXi z2-MTOYgp8cqTD(KeNpHeH&p)T8E5}y8g5sdn`Km+Ik5D>j{4O4=|ia9k|dWz+V@`b z=|EL#k6Dwq=N3`#LuMwPS|n$N6!Jeu86T)$&|(}R#J3-U-%mSC%6DbIm9p|Qh=`Rh zJ4=)-Vw^NZ-p}JKa|(_ozfh7oqIpH^^>tABU`Tx@gCB=7|N8-*wr5nH+2Zk(zm-|y zW06<955MvJNV5&m%umwa*?-moa^k#S*+^J>2cG(aKUDsr5Gr%M-g;EBPcrSQ%$Z<) zBN=VHa{W6=OoV(22e)hIIc(@jMWq9HmoSUN_a*EnZ)jPsR)Cb=cFZgBmUnMn-`1vo+sQczjM-rd1m;V)(wW#f{hAMD14i_Lq?jy- zggsWSjK>m4G<^7?2=DA(ZJ}Faww)UJ2z@3jmCv%-#Z9fmepM|8EpBvubnb@ zaJ+4-bgn(e1`9QJIm3JB6d6Q^SpCV1aSuuU-IF zaUsDXqJ6Rlq#{#^6OeraH)a2#m{^c@L!EWls*|x#0x7Z&1}^-*T`v zZg*7o%U=|CanU_h&Qxw4L{kp;-`;s_wAR;Th!!?V1l-9Fi0c+Nvaie7|8$+acDG(r zw39EsB=8e`wBvp85P}L#Ic$EO-t@d7n0kKFXLqw?DyXpVW^%D$T*72LkXN;$gN3%m zb^VEp??vQdodG&=?C(WePQ!ZDi&@HhhQzcFHU4&mp+)8q^nIl~Se0=3F_NE0oXQMN z2e2}O^Uo!<3mJNr_%`SWJ1^-SCc$CG0BnK z&(62AlWr#o72~hp`*2URaGhOd|IISft>ce!e7acN_4Bv8O-INVOtS$xPbNLt!I9yg zQ{0A4T^#6yN?MU?Xlq}%9R40@=96DuyOu;q2Elb&J)a2N33yM(a4bnN9`@Ob1!hOg zOB(DZnkUp1*2$*LI9nqfFn7U8ZNPdfr;7Y?d;%O6BpGALH6W1j_ z1Hq0txWq05vx4}?%Uf{)91SI{mdX@F~>}YJZfwf(K0!%Po`uE+14^oWZ*b*FJwUzOxRP(AFeq1kE{TTyO zaN+E&)RGDOlTf_%^M@Ss4^+l!E{5Vhw=3~M^xIVlDC+T`<#dar0dWM<<}N6=Nte1f z7I|Ll@l++A>72azPbnobvbsUIF@yD8rixj|6GPv}rH}d|cOY->;+D}AEH%-$8{R9$ zh8@k0)D^UOvX}7aB;vYteKJ+F3pz~3j??j!wpq1&H@AXSBkwMY4D03%zwa*^s$!T(@{qCoSI#&S za8T9eULduqn4lKA*zj`OOqyEOMK!vg%yDfHnBsOCs_stCDuyY2>8YsdGx(lY_Vy5c z`Kq$>bdsqCSIb^{ooT=PTg02sbsXn1#&u(hJW 
zUc38uKQRtsaxGI0dNJnkD`V@A#Ti0eLt6pY-br8puUyjF7lLQ&5Gr1WeYV)+M<>aj66C&Tc}}-E3q&IT*iC_?(i!+OdK{W56NYB52)>1m zMm_8PVW*3`NQ!cn#b4FVci>ZTY;NE{;uJiev|b5b>3cY@$Y;jaf3x%9jz)wc*rH*i zj24!V7|X16(|TvMo*(|qO(Zq36RW0BF4|Vh3e&3|MxdjyNCLS!B$^e}VzNVIupKyYJJgbApz~+{kU$rpbd=Y1I-!kT6)?g>!Cj;+SYq8?N7C zkt$=^k(T>O9C9pn6gkEeKT<$L7P6hOf&H0<}8(xZf@NMyQwp>0x#|P1u|9j9eUCk7=AsEHOVn zq=K9DZ@HH-iJ9W!UCIiKb1gBQasGl7-jwwhvQ*_;*|tS5B!SIQpRY=ZAFo)R|KD{3A`-<&UtKkJg@Ni`xZW%@3zz#ghFN+IUFVU(j>N*8(!1$jF*MP)m8Ub1$8J zlBHK)Vk;^>WBcNTIdaX|hbStUx-i#^V8JRxt1bvV4dP!le*5^S;&YIb*Q1>PB*l>k zxb1kZ+L8Mu_J0Y)qgVI^3z!#e7b{XvePf$rR3#LBx_+9`3lWn(-CwdCne%nr^=dtd z|1Gt&<}Rh0n8aA#B8U7IB>KUFC3QO~$i9+8pfM+;D{23-?`4O-pwA5h9VjJ1(gmqQOR2;Wj zaIS?6s?ON%sdpTHc^UxhVvX8R7~n_157Za6^krHH0tw84-QWsz=$w&Fh0*@rhaLx zLe**}Kq_ix^jQB+5p-a{lF4ZBF(Hw$Mh;5|1&eNGa;D-?V?P|zt`S(Cr62~Xg&hg+ zyC*m+4EdvPbg67Ta)r1xupV?cnF~Fbc}cx;TUPGEoOQkRpPxGr*nZO(Jrl6&bV`jU z7$dF_K2CKPdn`go#H#J{rnC#P)yR3cL@{Kf$98uwXBqmGz~$q8QBEMlg!6?_;H`s~ zCIwUAq!FGYFXJiW?h&eu@S8zBJV&KCoZ0ajN~FCzR^xqoTU8R$KnLun z$x1=r6q~zqSjfUDT=C&0a*-(ch@p0|n~8kELahBUX~Op$z4k7J#2DF4GdWw&T->4b zm#_oq7#rk542wbpv3#wAjvbRSKM{QAj#(REC#Go8lo30A)J|cg=sO>4jQwe_eSoyW zW>)K%(t7pt=p#*p->OOi|KU+58j*}%&wM{vrr~hBJK|m?IgID7@eMH=kIl!W9}WCT zS5vGYHzESx*HLIOE}#6Jc&0erR?)}g4wW!R3{2lr{+7RZrX=gR*&9D6I%vKkX|G!6 z*G;!st7v{Xy3aANO)`NS+32~JMR}oRA}LklB}K@O_Z%rjt|BJADHb{P_xm!{mxxz% zbT4bkbo?Ut137zPTM@s%Jp5!0Tl0)M2A9B+r7v@?qnV!s-#1N+SWcF{m~x#bq+peu z>(xxO$=g^9S!W|Ea3oMA_hr@=+}NyzUl^`^FyNmEn0F_%*vqgqW>5Pt9f3uHWKTO1 zg_lFyLwCSl(<wfT=U`^ILD=@y@H2E>>`X{;v=?r&? 
zwS$sr{hi9ROkIN>i%WZ-jR zs&tQaObAe!;3GW_V|K{6%|f!6bH$?!^b#{-hWc}@#;znQ-Mh$>Ey#aC_SqMyFQHa9 z{^M)(!7sXpaE2hBt+lK4wGv6os^QCdlug05XHGd@8G$j%%y7#w6KZnvX}v^e=l8vna{!J+ZOTl*Q{pbjR71xfl!&G!&|0qi3{S1 zlFqg>Ci`BO{-R|Qa;fwm+?QcSL~DH=uh)_p$$Rt6@0dT$SW^sj&7S)uu&1eagi_Mv zaWs{vw0}AxHxW}#lGw%K4<8HBDX99KD59MCqFE#{DK>vT$UO8TUY?i-Zz4G#-S8}G zb&k`r@Te-TP|%!~JLPupoazI2>NJ0ZLB`w~GZ)L9&!QNrcb4WZbxrc@Ab8O_<5?2@7Z(wi9A^9|;%wBPW#G@Hg7Q%0jOArlQ?&l(jL9LBs5J04&xn!H;l%wf zNGmt~H1{HB87TL|-JyhT%zL6)w8ysiMIc_rae!zY5>KJ`Lx&pi6XTcI4jPxNHWG4K z3jRzbrj)5|!>h0zCATRmapZM^vKk9l@0~8M=xm2)*VmnMMj5%^V$MZZeo6otMyDQc z@q3U3O}*4*oxU880YRX*l$Ty4|Hg6dN+pz{)nE&?N(0$gO2`*fa=c9bti%Z|4Tqom`-(`v0SXMt=o(BF01)O zR!jI`aB+Dk@a{LOQ^hT=>_Su6l}EYOm%F~d__fGhk?2k4l_Zekh6NN|2MQYkiCr*6 zFW0ItD!FsZ+W+Sv^~M6Pr&RIqzjML%ABA+^evT6T+K4+!@E1<0g8Fa-!e16%?419t)DsR}@+^eGAu7(fYy;G_SePgMjV zNdau(p?x5HbO=5e4nokUKp_AJq<`|jVG8hYKn4XsFbF;~1cvq&321_Vq=F%UB&i?> z5)=;HNI1&PsDg_FKGJ-%T zd!z91z)l!C1SSP2MV|`(rybA=1ciXWAdn&m9}IB~1;KbBDT)jxP=E}AqyPy=2f@H_ z;7C#U$Mc_@2so1ZBSP^b&lo)Q%pye!0)oO*5pZBRBEaFlRDj?^A*oQv+areOcN7VJ zfXY1KiuCv(NQwgeKZZ~^2v8OZFoyu*!Fb5co7ZR_rYL{_@$kX)03Q$( z5ETvrK~h2d5jV`=J5$3zgyC4k04ty`5FiygJ(3hCMUfuqA9KL_AZNVq9#}3vnPR}B zAT4H43Sd1zHo(gYkW?@|;Ms5~csLlIs^ClmiV|qJOt4o7w@D9|q6cjI?-l|C7!V1t z5(1to1y2EkC`!UP1s>|*$XMtg1q3~S9~c4zXcP_xr2>MXLjW)10~&GsC`O=x>n8M} zC7=?E)PD>CApO%c6rDa59gYNo0HR)yZ%JK$MkR22AXG0@Xd-JGEulNE9OhuLGn=|EvQFM*w)G$5#Y|Xs7PeJI|b| z?m4Tq)5gOr(LumrfYtu_0|G+_C_#WT@FlS!FbI+`2nPD6GhmL; zfA|Jm0tcBV$hz36T6}RacK=p-udhsA07L=;1d;;G2TB3M0k`2J=s{qJ^h#M!V*|{~ z>0Kg^$|XhaGUQ)eLFs{T4F~K9pdSoyhQmOpTpQg!1tIZ_n8Vsg*%E>P84SQKaDeDj zpl~Dw5D2g}@CS6A20M%>Qn6qcNQAVh0a#c37qI|FL#5!UiuC_|&}^~^GBFS3iRD@2 zhXG`8AY4J{ARr*||0kIueH!t(%Qw6k3iE0KEF>uCAM&68F$4h3e{%!Q(LuIaUC&h2 zRE@fP0Rq6z|11M={!cPM9R=Zsn73~l6=mW8^8!GD;6p+9=>JKk0KgiOiZBm~FUuYS z0lp6h11iwN{x=yU6aa(*1SCaINL!Hl54-=A#Rmoj#s7K0L!ppV#s3NkY&Rf^fDlRr z`2R0%ihwbJcz~xO;Nb{*d?)}6NGJ&M@5+C0qyL9p1bqrRC=@V0;3FW4q(G@alm8@3 zQ2+w(zb*m>K?mdn90UP+|KFwqf*&vxfFcAv5NRoZbAhk_1pc2Y(5Hg_$%7B*ivUJC 
zKt~Wrfj$%%`_TWB47dw)3g8gHj}#;vAOHjW>47omKN-RS27tbRVL`y)1%|^AfLZ=& z@tze_RAz|G!dJK}bZ;=QRpb;G?&%LMiD}kPXP@ z4C9}VqgUh^#pQS>oJxV~_2I}(2@IsnLxo%oWpr0Y8dKLQrs&=HAVT_gnC zepX+8C_dABvZD8M?Bqn>%yh;7Kv-zR9i)nPEER-MYcyj(+lHD4NM2YzdZ8*b9hBqV zao!@tEAzKg&@gQJd1X2?g*2M9E^WwANI--oNhA$nTIYzZ-b)wsnRTMTy<*LE$mXCK zW~D9{pAPNBaPg5}dJv0ZkDE>{whjl&Bl84uI&Lqz_Yx`DGmL&a##90QeYYFjUPLh` z|sl?t9c{hzvIhI8E$XRfhTWL5wp#jUD%?{5@ z;|l}w8ykyS@U;D{OgST8H;kC;_c03|^@-hnx`kE8L&f6KWv7L5YWi*V#c&tKNQnqo=TSMy`BSC~FXHSO!gNA|F}uB(U#% z?ZfI6jjL8rAbZ<|Q@{wMDDG**()NT7kdlo1@1@A5x@JX6p(G!FDb!{~mc@`DM&TMK zWDgv18wZc`d_a3C28|l}`BqLAQCAivKm>B* zry^J+lN%<8nkWn}S5UiF!DAStpg|?J92G4jX*H3^lR|*MCbz94*;#Zxd(_0hCn@V$ z$Fuy#rSl}C<){^^G14D;4~8L-B^E%rt)ZBbE?gCG?@IxFX%R{2`_X{0q;cz2WQs61 zRGJrHI(e&P?lCk4v7!vNNw?Aw#G5ve5B8#jmR4YU&xnsy&jnKw{jeCG!89y|eJSXU zct&$a;PpB|ahKdPXlfP8bCNV7V=C1)p>DjI;ej9yB1+aGzObwM^!cyw2poD#$x2Y4 zMrGO|9G@B)9art}8&uUqhS9MIC+Y@kOd|BE!muO8#xmJ#h*x_?7nyLO?s1$E?PFqc zI!cV@W^TpHu-wEA_2h`VNT}eXpL-$H*Td^_41Ch%&RytDDnDsJjU~^uEj8?-5r15?i5txbOUPu%CS3jO^h z8UaqM8fa^SWgrM$bPdW1vh9{FDxUhV($ZUNGp;_U4-s z{Gv555k~7Rm`kl34_RFQq1ldc8?&6GJ@nqU9C#nuCyrc2n<~s)w2vI#UR2Y7J?RdKI#bBGv?T10UF1qF=_6@qq+!EvXA1W3aPEH@K` zY8oTb<8FKFc<1Y3veu!rFUnUbu=u$=uj4GqM;Q1L@)wC@jWv5R{9h+od)j$S397JS zOLvmVFOGu0U5c?82TP*HGQ__tmZo#>0EUGy_pm}YP4}^kZ*p^eJayBJi*h?i25%+Z zWV$+}qd6J*rX$+0Uk;h1!Z2hS5g1sSDDn(^%q>?0jr*WX9Pl#lHFN~Mja`cS6lJ>y zNZcX1Jm*BuOpF)l-AetxWBS~mL%Vc@&6o5yZ*=>vQ_%cz6BmWJ^6@GuVzru(DP>-l zWm#j%osHYZtEQ@^;V!5Ei=*uI);Vx?(gc>X?8VA!+++pW3jGGDzq72nCl~yPaeBJb zw-!=EPv4JzyhG&Tb_G^btOa<F8tf(x~tFcmD;wydbYpm-WC!EaHr&uf|ZLN<{KrPuGivi3bWr_jIe zY7vr#{&@WZ`xX2->I!elpw~L(FQ^vxU|1U(X$sb*OAJ?REBWY#l6%~3;#%<|2|KGW=9ly8xo>-bMo;QngUe(I!Z zG1B9N6oYrN&rwl^qbQFZxwXT^IX$Pps`iu7V&-%zoXTsJ@{WopCB?fao1?wN%Q-$S zY7)bP31+<-$-u`#TE+50aV2P@nTMe=Tu2h0++uNaaA!DSr@E(wxX4<5$vnD6QJP*L zrwds@5{Jj>Gm8~Sm49Q$`Zi4yRS=&%xQB5pF1i49xt(q&z+6Pk%8lkmrBsfBge zLyLuaQ)f^=BSd4M(DvUkk;8UwI6JMfT)%l%G2DjMVqFXgXHZ$-Xe%HXBBsHowFo0% 
zx)rf3wKlrL!#G9Px+8arIq`|G2z3vd&@VI_8ELT>j(SwoBxX?o6Q7@r3%9|>`7Rw*&z-s&|a>rfPY43X`-#meo$J<^8_ zkcJ1H)@w&O2UjwlGBrmhI=4t1gMnI&4^G!2#pM^igL#dc(=IR--rgT)ZR-AA)TmflY) zR?hub)#e9kUlE;Y*IK&+1CJsZ+>J>hpmo0Xce=f8%50c=CA!Sa7j;?J00X*KnHI`z zTN)V?soy5M0X$y40;K03S&I7z*J53Sp>h+!9MJ}WU4?m%6p9;+eBBKqgbv}RC|poz zwu?+j=R&d#T$m)c7-813v}nnoaT0h9?fp`uV<-w-9JB;>@Su)D^FF+mR4*#cWL6I; z*fz>HiCE*n`$IQ~|8Nz8&o?Bgctz+RY;iNcsvb^YBD^m>AB*~FAj4ETvF3+EI1n#h zD(wp5_)HizPLmjtjZZ{f$<4V!MWhSsE*OZ?l8c3t@x{299 zv@I_RN#Yh?Im98+`x2d<u12N61mP%VU4OUYd z?H`FL2sgL`X6<|MT~N_`FtN027ByCJ5e5^O$B7*H`LQT0eEDvfG`% zbkNPqH_Wx3`LLa0R;2Tt)=k_^TI2Op5O@~%>NS=`k%KatF?y6~g(!LM{A(4eG=WHg zi}*>02a|#7JPP8DU>b~z+&Ijk4{3+OV3_z@qTO3?4crkK?PvVLgUmw06kxWHD{sCQ zJd0wBs_MxBy@`3Adz>1@j8aWfDJK{{E|ITzcE}MTbY6N}Dpk2?Q>j|m2?FHIFpUQl z7mbrKBo}62GiqJlJ)p)z)WhGi&|7@p2s`ATlp6>i(Z5HD8pGAr-`2k@Nm15#sN&#_ zth@Nceo6oI%CBmtN|R#`18>RT>iS}#VD^${6U=Yi2bf{0MR%TJS0!27#3F>mk!m>4 z%0WC~zuckmxb5nNN6>)WVkD(kh^2HLpdMEql8#!=<%>7rf$sm@rpF`- z3{GkXro8f!gSZBw=ZmHB8j_PkMo6T8=}^Ruf@{1o#8`wcg~O;24}!N*lVd?&$)XzF zNGO6{J0{A&e(HloPm=#p4%LG`SA!k;d#H4#D@s!NE`qG4b!4^s+5JCWcJ!#AAYhTpx6BYrhyQ6_gv{B^cZ(U4@ z&-Ii~aSTGld-Pj<+S!c)|A9oSA=vcG{diiBUMlJ88+ssvwmk3OBJFbp9{B|&Q)_E< zOCG!In2y?1wlE+XV8{87dIm0EW@Kgj1Y!l1m{E2n6scr!Ol05C8Fa>|pVk2D*udB= z#GRH_j3na*B5}YKFMzTXXwl-u%G7E9f1U(B_sLWnVSLnmaR@cg3oS|5qGe%gB>V^B zTK~@Nxp0qP++TNC#CFawEOfy~cjO|sGUs~r2=WXr^tk#_6|!C}wBW##{S>ob@KkEg zmh+JnkXO$3dM9FQ;8+~n++)o!UQJyokx{7DQ$=57+fvrnx74y^4U@dTyy*bN9~Phf z1v!z`ePFJ|XKv0{Qa8?t+3tsdxE@;gRBLa6O2=s^O@%;@FmzXD+VsNbtitUpOhfZu z@f8+#MK+?PobIpG1b?>odp4LrZ}BwnR8_!1%=Ob(%17w_`kVGD5`Z1NUMjFVPiz|6 zpBOEwg!(MqUBc#v=E|UU>{+m+Xw@_EzIkzwF0@#$q>HfrX{pL~?fp4xb^SELVlnRn zefhT*(y+fvvKw$t!`e=uyx8h9rLU;s9KB1mn+?e8%PQ69&lSnmo}6UTtJ_r^k`hUb zbOaobPMBIA_|$%nX9aKp%)V}+tm%Q-7g0aEVo7DvEW$pQea(3UEsihpYzcl`-^mVR zSyb$iy2v|9G1h7=i>r;oh}W0A!F?MmtDm@Ns7LdIrgd_YJdh$$Vl(z*VeIS29euKf>QmtADI7z zYq?b(qEnnKh**Ai@=wbz(bQ%;qEYuR{a*`fM%!;P`=>eUmb(K!nY$Y)>F#MRGNU{> zAq3GdH{$ys+TlO|myi@CKvj_-F25gAqUD3wE@90NgL>D@%)z;I62%Gr|>7Rs2J7l 
zHXjRb>Z=^#=;zCQotW<5+I)Ny5;uUOFVC8>^>1d>fTyDz7kAGjFwf-KI@c|EC&1s618qfo?_7E zTVR<5U4i=_NPho3d-_O?&=6J;6ll##Vo7LCUHEn67{Bb2!BtM$TdvWSYqMh1GsZ24 zxLK_T%~-EIj{4%edd~i{W6SCH<=s-lv`Besq(yc<^8LxC-!WE6v8koB1G3CjlEuPF zP2?x}iT0L@U&=LCc7!fgd&&eH$fM?gNnhsqh+C#Bv1JH(-sUX+T)JlJQM986cJrOA z#`>~veFUiZk&r@uoD@}h$-?Sx&s?r6tAh#5qWS{;LhR2g4^dm@8mNlA)N&G#KwR#X z&i)_B&661GS~UGT`QV)0=#Q}USA0>_opLJdQ#tNI>8tG0Wc| zKHvrK=X5fC|EtdD0t)^O>j*b`p6C~yN0COxTryo4YSjD5I&qEsBL$}lGVIH5n3oGG z=1-M&o}7yW5*Af$N;8r5kQ7ubYnSW&e9*vG#X+e5Koj zzJ*fWdvLwIOS!?^JP&2PMJ@;rvOO@p{7Nd0dw;6sOhoJJk^VD=pIos2NL6KCvsoKH4%7yin9|oUMHd-A-vPv4YMI>$yah5J>NMF zL3jBUP$XUAxs`9Sn5*-5T-QhYn&fA_yysqSz0+;r(4D8+`w6=v*$vL3om@`6s0rat zcsKDSFF*HsVtHp6e+iIyxwjvV%sQHGO#C>fy_1jAwO`&-&*TdcS#Y`DUZAxUweZEf zIJZhQ5!Hj&7f(GvC;xCcJi&CI2$&|JoQdKEotx>G6!ii@<#bX7ehT!rwH5Ezg#ufE zg8cV{ew-_-<9{GMkt!}M+*Q8GlJHLf`0?&Q&&OKP~o%0 zNR}*y_5+u3J73cg)~nkB3PF;NSuH=+k>TC{xm{Rarj)N@Z`zYYa6vK~*GWMfgq-Do z*8>-1v?XviMNoctkT)WO8!=K1+PRYWegA5qPvI zZ){+c*Ps{C(#z@vrUsH`e6Adp|Fl5kf$QW>Ps89~y}Fl43}U~poQr*;V}*J1PJxY) zS|YumAWHgU${QDGZFp5kZp}KPMQb4wG^dyC0UlMS?S28J*<1YPg`xWa_ql<2kKa_r zHj?}dXI6ywH|`Fn1(D(n2?jr6?wCzv8NX0e9`NeRVQkRd>N}5o)`NcQokCu<#0+1X z=k8OCp}9=a5P;Sz_PU&45>uHvSjXUYNUa?HIDWUwA2?XU=4Gx# zz03czCu9ubl%=!Es&xyPx9i3sX@xP#7vKZ+n)fSVtuT=_}5!jBLL z`Y&*Obo{oaUikYd#@4a`z;gEX8p@-a21h4m_N-ieMPL*Q?X4I(6^s>{BoJkd`3&v9 zT9M1PQ7K;8-tXN$KSE{ui_E zzr6nz@9dNI{V~o>m;Is%FkpTj?kqrYAXr75%lZR!WxnyNFxOJtRxCEDUM{;4ECF@k zs;oYLKL)dbhT_t8)v!CXvZKWJ@8s?fLN(DOX`M+ufD0OzEN5;6k)&K3#q?FV4q(iG#XGy2JH)bbSuh+ zT>EyvV%vcw9ogT;(?`aLEoq(&S@k0C-6I%#H$U7nh&K8)O; zsgQw?Stn*+PuV8-0Y$L^S1B_{hlWx7CKr}orty!>l|AJ~ycRhWD_?u`&@69mq4d&w ze?CRr3(6V2nv4agUdrNLS+1$0dL8wjo70x*5L~_^3z;RAUS6V00=^$ZITh`^a^J2J zmoWU=0>~v!HwEX_A=2GU^n}6)O}r$mQKk}NUpMeh{l0H$=c#V_Mb(ipl~Cf)<`q5%YrOpj631fClZ&*G|F^qrpMxre6?pwj>((VtL9_b7 z88+~60#Uwua|&&5FgL;>{`{|^UE(Hi3E*=xGmUB5$*WbO`lhR51EEo&tGJsTuCS5A z<{nc%BBBqMGpi`cebcyUqZiV6oR|@x_Lso*aPw@GGm;N_@&qx;Md+8Sly8q5OQNh0 
z%ACMx0abL(S7`mnq5k5`biH;s#;Mc@T~aRzPNV>r^Kw2-r)6InsO*rX*if(wQArv)Zqx83a+qgpI z*?!OX`6u@^?`xEC8=1G?f?h`e;(p*c^3R*cKk?cG^T=5KVd{Wf#g34vuA0q97NsA* z7C<*9T}n>&Bc?;&OnynWl@Z5Y9w4a;?2A3l)f_3w@mY3F8pZ55O4 zQw`pEdkLEFo0#z=yhq#r1Ju0ULA~yz!9coh{4P@Fr;uJJYpb6vUs8 zoHaR9)bmiiBh^Iml-s5B<@uePF^W;nON5M5+H=A4SRWOw#$&Y&lXp)Qc>rH=hVjA6 zH#*7tOvcn5+OxEK@h7E>kJ<`)yyZ~*k(+Oo*m-5Tl}%m(FI8OPX@t<|L!`9dG`?dT zF0xYbCsrzshu^Rf-T}3Fm3&2KaTOYI>pi3{lq2MWZg&TzkWkO_HG(O3Y$hkxp)b<_ zpLB?8l(rr_PR?XqQ^D53j|_qR;Zqjp$NseFFFyTu6$8Q&5Wnn0v*kdw&Ht3ilkSEg z&4W#cqyyETg_S1Yj$w26}H6@8~5w!j&Llm0-_nN<*ktU*0?9+()n+o3NlW46EtEy;p!jAEyV6-!=}yayH(J z`6D%wFUFGvYc;RozTijJ_9if)`9*GX2&Oyy@q*C!NtW@`pdoTw5)%^epPJ=DZ$iDK8hm~U%I^sT~%z!pCUsJCCXEc*u}fVqy!l@N$QIpRC{Wj#9;Vm zun^It~IWAC8LZ|$WS4M~AFCxp|upGGEu{RwkxFRdvpnsmKO1T1It@fP-bGSP!w zlJG=SJ%T`1bK>K}M$&P|2(MwvK9f#Dh01_CSf7n!au2e3%vqv^@C4;^=vNEDC0cvn zB}%4ku>=8H_d!H6P{lbjv%*E2pO^2_+IL$>6->66(%E_kNX>rDZh)9Vc_-dSrQ8c& z2uej6VrCgqZzX)dPJD{~{fUBB^3c*QV)Br&XF&cG7 zq5=NpwE&-KmVi9o(6UUyd4w9mKxC*(ehIT~0!Ip>(=W@pPlcOds7oRT87Xdb2p%bF zH9Bc98u0kPRtD=D+ExPpcjbqHYf=fAGBO6JzON`B-4paDsg(pKQgfs*9$|nX>fVi# zI^0oQXm*lql1Y8+DhnDCkMf9iiQ~-iwh=GYh&TcZIwijqPtkRT7Xmw*p6YMxcZUMV zJJ0$r0)hX5pc|=Ban~K**BDy$ux{kpRW0C_<;aa0=0f<9xD{%d-2>KsUlDf@odke4gpDq zcabU#rj}_mwHXLn%UdwN=Q{Ht zPQM%9_$92`5~GJd(4uSLkVm==H&1N83$k1S9kaiL=HTl8eU1M!+-l~!Hd4r}6`k9Y zU`q^9#`R8le0F{ms%MKW&k1CKPX6*vP&|K64@oqf#$J7TkKf&Vg&(V4&%}I2p@w)M zVY^|T)2)tiHD?a+rjMh4LnzDhWgQ1|Dt6HrP7GtsVj^j+Gyx z2>+cdnSb%llxVO1sQEJf1JRq7pj+>Ews?Dpc}->QyJh{ijQJ^|v4(-sBHZB`~ z&OD8ILhfYw*C6Rsee;t6q50Q7t=6*KqmrI8w6}VhgRqwxrqW>v{k6kVwx|Z~R25hJ z@0Oo<=NuFcH?bMw(tS zrWY6vRpAy5dXN4>TX2m)PuGPDb!edHf$L^VVJK~W;t0$wvK~MnF&y;8+`yTbrE3d+ zKf&&j;5;J>CnIb0M}dSO=~_PcrHG1GD`P#Mv9Fexm(fn=GR0wLb9E&{`FOk^l83Lr!r7&YE6+M^C)|pf-%!F@WmO zYCih{J7DQc5dJMz6kD$Nn09Og2Q3tLkU{?_Qx$~rGTBQ34o`^seUw~C_R#0-WM*=j z5s$xcJ@B_xy6QCB&(m4N=jgQ4s51e#3Jf5JmuS71>&(guG@y7x(<8ArpnnfLM0rZX zatJWIM{63GcSr{A`J)>r{%JbR{SB&<25fgxdif8e(#PS(ORuIiKX1Qp;VhF{Z%AN> 
z284+_y^dT-XWU)$0AG~@o9`Cml1b3iw=pyCH%ozPDJ$Lhqj0B>Kp^dR;a*3~iJ{-` z71IiVQfA!ih3bP*cqoMCRq8w5KwMb9$g+2)|psw0-h;aJ}j;~X7WNc9?l zbayjWT|zmG_^jSUM!qsnOGI%nqNm~|Ve2A!$?E(R6B>cW=5mLtnIF*NlAhKmSsvPl zQOS;oUw%&m;5cU>^%+M@rR!9rwf&w!Q4REsSNnS=TGPen=_M3Bk1AHgMC6_SW0$Gt zyGJU^L;hDKZ18ZmrN{UYQ(}BwXiJb>pLkn%!G{*9IqR9Q&l%qRp5uU6kMz#)#-xC` zUNu_r!uKPnkom?wK;;YK7G-J79A~bq@KcEO1K`WziXGvcCk6gWoRytHNk0*CLtywL zi@;fDf265s+Ed^m)GyybGWT-Z$psC&WAy4+qa0VhWy~s_rgL^RQ`;mMHrEv8`$>GV zvV)_ZYPUMAGZ!qDxAwQtX73j&l2rskqD%966%ji6Oe}rb-yQ8<^;185D=;2 zhgidPoTNt0rHXdXd1mm*jo03I&u*^m#MyXDYm&v8Q$En6a4_kjbiHMrSeaQcC6WW` z|2}SZmF6i1ojYJ{Y;0rRK&QBTOYV-&K8^dS8ol(kM9BhU`&MY{aM-9u)sdMRyB!du zmB9ts!06cUWd*OyJRkyI*frSkwlfjpfhsmswc(*Y@4(i$yyRHnscx9m$YFe!$P#LA zi9!uTgvNg$>3@{F-;rOHE62TEaSlkVgz#v$v5pD$k?vtSIV;PSbBJ(6WE4>U-taTS z)>DAX` zr(?TdC|7^_HoSAs6!8M+O(gj=eOvO>JvQQXH612E@m4gmtdy!C1Q?A%a?qZhAmN5I z8v${&kMAN>r_qlH;pDm@OBo6wJC@+GEhAFjgIsJVgY-inVbl?>6d_NROaR^(WD!fKy7MqQUeOZx$EdskfKkuNt$*GzcXBtR z8&YyVgkZ?zUG`+zzuk^dkm!*v?xqy8ft=E6SIgX?lx`D?9GoR4kk?7fBd-yWv-vx?mA;#LApnd6SzG{~GGR%Go^9V^N^ za7s5hiYwLSk%FeTQ?i^WPGusgIOtC)52eA@IYHl$=d?x7VUTM5_5+nzI;c$1T^UO5 z0#32QKGNj8lpn!4Oc7Q}pS~NrjW1}Y+KBDHsa#TD1)OPJ@2bS1;&7lvExmrkxvK5b z_kme2#jd`wza`2)$iU@LjCT5Sxjwxv3O zzOZTW36cD4ltIkylN>Ym=`==J6*+gAtahvdnA?y1>;>?lM%-AkB()L;#{NdCwN}@u z7lib94VzdD+Yacu)N5EB)vIwLzl*PXcZFQDLrD|8I)YeP@p<}&vF8hIvj6V?&Xto4 zcb}x`n?Y;G$839&B7Y`Kn<~RRLD7lt66^c}-?0c(l}j_RKzI)2rN{&YkFBtYnXm*- zxFRwG0U0$yS=7Vu1HV~EVY&YX@;xphI<(S9@9YVQs5xY3w{)y|`T74Ap@zCgwKQWM zBLUOWT3eV5MLkiJ8i;LBsGzwxh`(!^G*TJKC?lX!(T^OM0u@)!7uq6{mtXvxhOR-T zy89ik=bmkTFg!z^h2KYsa6@<6!a#gN!;wBaP`|Z#p#B3l(;ql3cT0P%oFs)tS=VbvBpd?#_c z64oU_(NYqV*T2UgT1Y=;lkD6TPs44v^FkN%BGSua#*9H+FJ0z}0Q~ZccVm^97glf7 z9Y;3D&io`c(c13z=ljm@=>RGR5EsgLXHg$eN7YCbm<14VKYKw}$irYOJax z%KZM(L?rM`L&H{JA^}9SMmVWw7fjrLU!v&kBQl>y#*MR!YQ_W@ao0zUZcCnA=tjU@ zzuq%WwAF4M4FoGoGX3B#^Hq8hw*s!+QT>(K+AX?uM7}9^nh%o5P?9H#^KcB-O zP>QWdXj70Uxp9Cnf}3$4q;a>woYrX)QpH)jv_o zW78#Xik8qeuDSHKR@sWOOqrMF;lB1en736dKU^_6W!lUc+)*&&G;TnY5UK|mm&%A1 
ztC{k+q)yg!?K;g$_&!_GN&7V1EkNz(%S)J&!cvZF)Voc&NT_K{hRsXfhYAz<+Vp<@ z3+?tyzp&k9@iS4?xK?%=51|P4-@4V{I#D&in=|-Dr+h%Bvx;<5mBe#ff7QgaKeJ#| z86B7M#R|$~EVT9n=~9pM?WUxJ;UrET|#?TQ|<1Pa?R@yRG{Fz(S&?C!aOBbim9QQ$Fol#U5i_)!l( z*^4cHvYCehol9B(I?)dSM;uv_hbbDM%jcW}pU~3hn{db_J^jf+xpp(cs>|>_DKhgtCF_t9`Vld3){8DqM^>Lt^}mIhl_%2f77_q zu{c;TDGj<4S$GXH)r#OYCE6g*O!ah4bpkbFS}&pZV=m0baC^OJ3GDy}e2?g*qMciU zKZ?gZ-i%el>nNw7zlB&-x`R^k*4dE3KGr>>%9wcl6eb$(E>4msHnB)$xFIIFDx3ND z8zEqb+L~aNRcdw=4t#pLz~N+JgX{r&9S!d>RI}=t`iwb$l70vktl(f(o)OuEQ0F6b zB&P*aF@>_p6^`0tp95PogL$pG48aoXbu z6AHZdD(2b;If`gsbW)z3y{2RNu%qiX(pgYDzGo9nP<1cSXsfP1#xg8n_6&~u_o!1^ zN`00uK$V=z{H#@W4^kDO9;Uto+Yt9Hen?JI;~r|D9R)K)pB-A)e;}P%BIAyY|I&jP za?YMJHb}6V@Wy*YUl8eCmH_0l?%h(T$XWGZ?-C{0`U;Exu1RHd)je@T9E6Cwktj?E z1|`i2#zt17jpl_n-8We)WG|>haCm|6-l!c@6twt3QFp={kN?=q8Xfy0O-D&Cw%Qhk1C3UvUeAp%ufD}$VZoGfBix=Th;1{>nF~5lBviUV=D&` z9cm?qjK;7oRMZ-+4cZy&zq^+Brub3w2En`Kr)(0;Gu#4va@oIA>JE@b36u7^5=0Z! zU23HZwFp|7xWLYJIJIC!FkB(#bxWJWO9WI(n#h?wvt$?C=W&~woyckj z$FsfCW>y4$f`>`I_u6JDd9+x^%B%H9`&ks3ujJ5k?)hvOp9N5_$mKlWb9JWI>5X3? 
z+ayBqiB$7gC94VmiWPM#@ubEPSXj9R^!h=nSTLopBy-By+;(g9gdE;kJ1}s>-LW)H zrBy+P5e&4I7*LSdPXFltsiO9^jBGoAd~0HvuaoeCngEr;ON}T0138GWkX_L9zJ(4w zm2G014>{b&Rj4T}s%eC5?l^(eDc>?cJyki}8zu}*Qk76L_{)c~>7Z@rMDo;ImQ>PF zX57bQmRZS(Hp@$ESc2x%Qc?i0*!pV-(W6W=M`5914(f%qkcZOmwQ+;Ln#VZLrY`>rW}dJAPTCM&>42I<5Yd} zZIZ#80{x2>AZHD%@=C7_jOUWTMAGEwfE>Y$`h8cgGIRzA)nv(3UKSp+dHZABvj!bt z>0v{+Qd|_cSSE$G(cMJYAEi76;xa@I6B?T1@9#%3qMslsq#wvL07bhm4BnvPV7A_b znYMq3=Y*H-gAutesXIh~iW|;MT?eVfCH9h-FwzDdI+BfF!%-E%4ex8=;>7ftBpR^G z5clw!KrujLKf)wdNZyFu6454WQS+q)rN3^(@7M+Zz=n4bJQgV-9zt;fMo#@`)rXx6!cd_M0~x^X*=C@8J=hOsQ(6Z!_MDs*8) zd34Uxc?qxX6)WCPK|6rFX3D34xspe1I|-sjy%tib3QON1g))M-Q51jLojKRbrufQM zrCu5-+GZHhlrPa)Je|$N(#%E=2Srz6TW> zB@sM^&`%Eixy}*8^vZZX9D#X$-w|_FCzK;bbg(DEM`Mq?lwNtl*17F&zL{stNr<(u zfg{*6VopuI@`$$jfS#>?_YrDZjhoxN6$Br9^eM{j({{hw$xoDFX0dXqN6W+Z*E`!Y zD+!I2uc*U`-`qvoAw9@40t|C3L!)|+A1-SEL(-^5hh?)D=d68{CS~M--LeB1Yxqx_ zwV&TOOmraLFsM+-LHB8O$!1+MmSK~YaK8HnBH2h<{>#l75vntbwTV~>FWTtF6tH6) zc+Cmwy^80c<+v?RK%N?Br*~yUfoq;)rKveqQlG(?W}lhvIy+HK0z-y2C2(_H4LsY( zD+@L^pJy3|-@#VP&5T6y3$v=-w+COjE|cA{LA6cp8CcHtbY%QEVGSzO&)sIJuO2Ab za|$R}evrGU!cy1Tb!Fpjjzq0x#d!9Nkp5RtBh^;Uf>n*QM_Y4Hq_6NL@1ZQihsHZO zd0**=wt^FxA7G@(_T|6oxWK5e0$)LE_YMDnP8dK*>F^T}cW`h{Xu`;XXCWUzT6T(r zKq8&iX2qz+d(n)ersKmo3& z10VH*+B@c!;S@TEYMnJjME`8t5F3O*m2#db3{iu(9&As&b12LXA3==-b_t3)L^;0~ zFWlrJw~4L_qg|I_9~{TjZ2p7^;^rynK0VbTbK!vcfK!x0W&09tl(DWYlZNaJJjuJH zFio>FUytLobYxLB;sJ~Gp*lW&z1d@%(5C7TWy=!TpKHVlAz94c&8ITJJQYN5(eyX7 zBQ^o&v1t()9fge{wWb5+Qeua?n2)RTM3CL6nwo&GjV2xH7EP%X0^u;iw>gODpq@`6 zTIPW3(Wqo%ej)vl)q52A@1D#uLE{tFkYW40%Ilo`q6pgX!xrh~xvh|SBdPAV@Eq&| zoDEfU{HR*d+%cM3F*6q5c2!d9Oty*+Aktz?U9s(m9R?kQW97y^T2(tC%mGLg@1!PL zO>K2+MqO|zCdLC~KZ-A71Me7U9bAZyXdwu-5?c5W@VP<{c|syjay#4AOR9H$y1mqv z_7gFhkPHI~95f99#{g!^DCc-UjlF7veG?ZrPzQi7h#I_jsi#s-@+(Qa+wSPMXJg27dMf}l_K?oqLtk}RK2OX%L)^^E`cA?;?Kz=9* zo<${qa+Y>seOmaQUVn)M8a4HFXs|l^9U@v`VTJ;GgoM~AoL~AI!(b#4PuadjG5yC)uhCk858ud}4Xkkis}tbmk>}$@U}#ASM`TO0<&OB-wLGQZob~Yu 
zj)QmrGHAr$^-gtLm8q~#PJlC+uxqeqUnJfzF$2uuB`71d&gz^I(%mU5FnXyRa)%~v z>Cek`dQOU*Uh-S-pe5n*NOeN2Dl|TjZ3Bzhdr<6^=gc-6K6R$YoB${L74t?Xf);#gA0|@-Vnv<8 zIiv@WUJM94w|a*lXsh$gM>t&9?k+*uVuC#JfjprkR;uR8-OSxsc4}%_7Ikq>Z=nt% zgS7tjZP|(sYhLrnAHkpfeZEsdAFHXzOp|)x0vVlX` zx=gLXR<#2STyFH>AhzUT3?2irV2dXMDite6>$~V{?*u75_;El7C_7!z?{O_FF6GB; z7c>O&0#XQE$fjF%82@N36gRDL^E#cXgf+Tm`_ktYa7*+#aqk8s^U;{XR+#^pm1fwn zXgvAXIrWl76!C;Xqi)B2&hY0F!wk1??T5RvxpViM3wMtV?@At@1m6tY!(v1HUqc&Vz2dDCX2RV;&srS?6Zr zkYT9sg58NQEk~smEG`*NivhE4=-^hbR`M#MSUG}$CuWDXBZC!R8qx|zdv{@4;&9v7 z0)&AD$SL=Z&F(vs$q72{KT=B>1E`|8@^MAxFfRJ=NezRd!wFiQd2+kQCHWh_31e0( znFm1P)eBxYEl=N={F?}>aH`r#3wfGFwM<+Ko3$Kp>PmbxQ-~W> z$238xX}+k%hGJf@4b0MY4R-6`CsPf`OU?pJLQ9aNV2@krf}bP_@qkZlD9KrFwG4Ic zuxcw!t@Ed(xd)u68=pKH_UD`LuqCXW)96O$1XK3x*E}KS z5Ai98J=kcWjj2d}7;_PJn7h7*&>r*%q-))M2kC0Vh8ChIc=Wiu#r%?7WDarcm}4wTVxh~=9xztIag%8 zSMex->vxSuoLe*5uEe%0#)=<9ESu-?v)T+!1k)pA}7#>`-Nl*ni2E2?u%lcr_*k^5Dp=3%5k*r+a zL6eOL3iPq26FP3BX1k+#zF5*IC2eE6k{Xh% zF}$vr;S=T!)jH#=s=+y}-?q##S90UI!-XutlXZgR+0=z0@%v^y0c8U7zXS9B;>C4> z>Ji$sj5Io1VIYos0hhHGxXp-;z_}z5bemQptgMzrsI7r+v=6$l5M)zvPBaOrTp}82 z7gDcC&wRoerrE-=y<3M+$-O8aODH%Xw)06+Bs~t0-AKE18qAw+&q11`00}`=BDC7s zyXtXo`~ta| zpxyfqmB;ZR^azLi!+#(efj}$ua&q-g*>#u_NuIlx(*VlzW>;=!I009Qjnl%!9l3ou2V9MLgsu&$a50X$ z$aJYf*nt}Ol;&~7p_ZTNGZa)|c^dT|@;TP0gnaXmG1G{3!XJQYR-^8y+H~W;LUlC! 
zp-e`2hpJMVn&u3+U)q%|a2#Qpl<@HhllBdHIdT_W5`N6a3a#*X;%KdKc;J<^uLGLv zTSdxa=a`_HF>_JF;qpY=#+h|z1|?vK%tTzkp+deQmv&W(KjR{7=CO#&&fdZX+R|bS zHjH2caN#(a5^Kt9#=|}*KYaDS2xgWs3Ct8$xi0n?|ku_wCDJc^vg z{sUYjAkf@GN)I>qnQq%m>ap_8#24_*dn$)p4GarPd*#<)sNun$?mjNdQ{iDsMYK;J zxS@u4rr?})5ZLlgoY{DD-o0hvitKC;nHv!H%|EHNHfR8;Br1_th51*gsKOd}eY=o$ z0iFj`_$bBVDKPb&f1nxQQ#T+9ZcCA}ak4?pvio)BX==;Sz5#2Ik$+66`S2=pao=); zSn9Sdf<$L+NP)hOh$ejr6l4)fD%q8>3j}QtxN|6B8YQ+;GPMnMgP6vCNHrGkjEG{I z1o{UgplAAg13a;2dTHgmUSi_w+WP&SiRsEmBba6dPP{^l=7-Jl{*w*tc}ArySTOL# z!5kG>#ntHwwp)zCqrGTC$twGfVqP3knl6X9N+j@FHOqMvH0&!NkVMdm0>Izh5Qxm~ zV>Z**Syx>*5-4C*bX9|Gq@vtannp>u`sYCU#r+s|?Vx0Ax}I|!QcihTZ3`K_@2fmo^XI5o(cYQK2$jm1TWNbyjf!I3vkn})%z2D?$$17Qd;C@aq829xK7~2w z!WQFjCr?It3|BjTR~e>JlYvHK&2tV8bxFn-9={$`>zr{XzRjT6xfmnX$N>bDDBYz$ z1=+r;B1rjrJmo)6$$1ShB&eZqO3LZXGtL_G!4dtTmae+i$;BP%P^>98Ep9UqM3uAx7>Bjp@JrU6BG!y(5f26q038 zr)76?hUjY3%=k58D#05~xo z03X}*5z}8_^j^{{>J-q!WMC!4iWF>)%BrZEjpr@FCfY)7E1qH=B*0}2AYavZ2<%0G z&YyiJGM?$>!vE@oXg4=vP#Q(zv~`WOZOS}lubA~!J@loJ3Q;WS)4OkJ@0*!0!a$Y9 zOHgWS2jpm&M~&(Ic#yF8b=vKdk0?uWOeThNxt+l%OH4nX zNV-&Z2Gj_yfo-+UB*S3DO0@bf?FYq$l1ez0fIc*UBGw4ARuWlEDL6HALHUP-tOm1g?xd%zqVSb_$!Ka~~{jvp*m;w5dG%zA^U#bdO zk^kbtPHii4W~SGM@goSb_q16p2ih0YHsf&RaXhW*`JdI z&3m(J>XbK+Xw*uy^>!F`W)FpGO5%7u7}+wNs}&trI2BJSYLvC^QrnYz9sWkoz}~+G z)t4oR#X>4@`C)4YJ2-Q?J350PrL3M7L|J2}I9~jDMLF4=Q?I-L+iUW}AZ;!so+iybYSMeJMmywB`}1oR1Q7WLRxuU3k8x&2UV> zB|cWYhJIo%Ev{n)nk$^#2A(G{&>$f*2H@5tw8$9#Y{b(1^^R5eHiuR%7dsD=18tl| z!r@8@8F5P18?u3>p3b2!dsV_?t+e*w#6oHAbQvtm*mOtMaPCwggmFku-UzQfWnpn! 
ztr^xIk4>u|$dgCqLR5B$C*6ns;Oef4n0v{feHnTprLL=Ece-AF-a_3n4sudty6;Op$XWsq|FB{!N1`zM~4EFww_n|K=Yz^(xa1@$Tk z?Od92+I_k0HKB9`*9q9bCW`Iut0sYlRP895-{*rs&KIa&HztO+$w18bJ(Udr2*5p`K%JfS{{F_>`0WMc8eC&Ltq5u^FNSD z%m^|h3Fvga04sI7cuEbH`M?`P7i-MdOEo1!G`c0ctMT z@^zf@QKmi{rrEOUcI0`QE4W5t z8?ZK~Mz5tFTnefc@;+chL5eHu58Su`F9&|k-Jn%*lv~JO$Rk>{&?-JQ@kBxwve|0N zC5OC{)*neeH$N2&SwKP6Z68AmfQ}%&fcY5ItH?(5EJ4)>{-heUd03BDV~ExW>YGny zVjN!>KG(!M+$@3B8DgH=(^J#C=#M%v3m*``mrPEYh+nXb%l#Ein{HvVrWaKbrNS2# zJ#_UhW+^J6TP7_{>|*-<{{X6v7tB#SrXOMyx0#{u3%hAk^05ckvk-QloYQe^=oWxl z)}ECwY}y(QQ_$-g%U6}u_UIEn*%;x`<$L^MDVabStZU2_dWd3d>P0{ZxS-3Pw+lip z4Fi@H_mJ?``vk?6IL}aaR~4_AEPl@0p%+ebZ%zumS}FRx#d+?;xogavSZvho}0tyWawow49 zfn~RR{{RuJ0j|Yab*+g-qt{sqpd!jZGGdrJ+%rra{-L8cW;l)CPcq-d;qi zj$ctWg0BD{hGqqn(?zv&D(?K&Vq)Qsbg{+fti16p#d8$v5yKU+&tI5BHDvt9QS37* zYjWC+?<`znh8gsF)!IE3cBjlMG1a>ubToQ!#!M|5tRcQjX84t+;LaWY08_Qf#(-x} zU)0Y+J4VVV9q970x0X2=&A90?^CWM0;~_3oc5>Um1>KqD_^xu*lI`=t^x? z02pliGcu=$;ud9ipo(LIigK+ZK16{d?4eVZ#xvhBEp^H(o2n`HL??M~NvxSYu-$-d z@?OTTt{6TKM2CcD>KMZ0qTi$S3G!hmJ`w~CWkoN@{-eW!t#`ZqO&>Y{eZ;F9?UpB! 
z--($S;GHNq3K7Ig!7NmLuiT@m7D44@JwXB#A-j)H<}LH%^wRt>jqTx-X#oZ8x^hDC z0K=gITU#(WqW20vLs)%UhsijUB!+Br@KmKm_;+fr%ENQ5WPP%IFwO@o1L1oo@X^4K zJ7XC~U})~fy?!Ubck1dO5OlwS{+}?^bFqAD+*=yVDZaTjd`I;M^{M@?9q}8dJDZkR zC}s69G(X9j}s*!NR5cm*%86DtPFkFpL|+giTZE^Kr3 z-=YcEEKqPEqFqyY`YRJ~o96o{}{r$9SdH zoYzQd!n+sY4B6J-8rWG)IDg@|Grn=4=A(MYCs=?RIfLIUcRCBFCp6+4DqoZU&h2-f z%LyvY>!R3kS+^Cw zLaR?1TTbtu+J&-`z*`TAhClRf;YD_z%p)Sf{dAwXl1t)z+Gm>< z9`}E^6e@PvJ~{fhqi=ZA-ec#Pr}H&B>R(xnZ;55g4kh>clsl8|ikefnD8~_FBWNL& z`hY%15JW_kn=c*M(Yj9RAVt|~=7z4m1__RRVME>H*!CejLIQ}n&Bewd%tDZwk2J(t zQ4<7ZTn2TY;#hVVL^M!Q_*;k>hCPDrAdVD))_1#ykmi)EXClA`XE&pz=VaoCvz^;dr_^RK(tO34%mWx)AW;aoT$><`fHZ@B@9~Gk*JxX% zepFOdxBC z2y19jd1_uQmD+4B6=8C4uxZF)?Nb-*$)Huc-g+n`%Q?G?3@$!VVXTpQMQ;VnA+nIGIG4*TX78l8qbtfK zE^NvinJL0P52p(Y2HR$sxC1FW{77LrZQ%Q$8%oj%~wC(C6U>h zYVN*?Y$}ZKI2AbWKjsKAfYm{1YUD?8te_|d&?O$FWq@JYJ}>Yg(25+ z=|%vk=o3j4&B?}Wa)c}!VQo6?uLM7{srxKgxt;`=U4OY#oYeWqstVzyg>wYLj9gkbvY z<>TU9t-%VvWqw#pamK^4l$C1uUJM7RQUTh0V5Rch%PDhu-71AkPda(iA?~2IhJ*6{ zGNFhm#WuX6VA~GUFX-FAu~N=SA>=9cFrxRf4#vmC4>ex<(u+&atXW9a?*RUW5IIoL z2O~?mnqDK;{{T_nRwez@e(ol-TzI}G*){?I-FI0-tQdpEv6X01!rq~dF;^7;RNJn3 zKM=haNcbI|rrj@B;4*u;co@o>Ae>Q?T*T1IL%ULcS%rj@RyyNGS(*WHj)yl;NU^pR z%O4XW<&=6WL=LaS*pRX-mA+3{fFvB?tXehFJhu}(JXwEHYUWW-g2$LLjF`T#X$Wk4 z!0oicwSloHO&aNdM@8MToVNy*im+-h2^_=LReoxzYnXQqP@hgJ(c-H+xQBM32m7`M zW)RH41wJFPqHT>YRV%|PQT)n@RS?s}NnCP8YHk^#ra`io?fEAgxVpy8#D)Vs;?yv8 z6mc<8af*u5sH@xu_jUdk%NVe-TR_g|{1;^1jMH~IMc|F%qNd_&6$FDt(JiUFFSkbE zoPfx>&Cg=Ro|P8bt}6{)eJrep70AvCGUpUaZUaD`7Rm4Nn79s)4dh?MV(uCtM~gYv zX`W?NN2K7O6<)#>GSW->e&c&}H2fcMRbu}Dr}j(8i^weZU&=a7NBdRJh|a+*RPLI; z9q|Y`7zBRAeAnvjeb9m8$wovCP|2PK%+~u%km*wUK=&}jfk)(xKXX^7U*rrF5lb}n zhqC2cfkUGuHO15@shu`6uL1+a*$^Js9}?(v;ss#)fYA7ud@e-mJ6Y{J~8Lo4(f9gd(|kYiFE4MrSKt*y6f~ z>u?lbWG?-LWfe9ubwf9jVT*CANvdx3Pi|vP6&fHo?7+NDMzXpWZi{wtVLe& z@t6_ZZu-S_Hnl1PwB_+1Y*S z;?KBrWMnT5Fov`Q2pbVfU?X2(solL`*s8#TLMrz!;J6icc@4i1vKA;D?@nbUdl`Qb 
zhD!1%GCimk^}M|%Kiwt1aS@ibnNMWYTDpp)--Y}^$otuCe{74pO-uD>`h?FW=500%)c^U@a5FJ5{Pe5|l^+*~U;H~7>qGSzNHDOS~9 z^Kij~npdKl_CW-l*gpwp+!$r_7CbC`k<*rVenAOxNf3Sv0c*Spb-jEuF~hV}`?2|# z1+(_ilzauNvFu{@kghB$&&%pLUi5z2E8xf0?rr=|{zNUv4Ph@_dp7}{%%QHwCz5Pg zTHSi1#(c!HLNUIZD?|zk%f~C*LU*h876Vq??a8pUl|8{Qc6g<)!w0e}0%@OIqne>? z7gcb8TRadJ# zOxPZHEIdOJ(aMaxAn_U6i$wx!Nz-O#cC$n{-Vfc%Z3H2FgLGP24&~j02knO2` z22`+st7i5#OLsjs_k11~k&a*pwTX&pr=IRebr$Pv!1`mF=Q2Tt#=c|tUd{X^eZ|xV zARl%oa}@p=T@mjwv8$PnT)|x`UUcnVBLc1OHJ`YxUdeVfi9@}{sj0u>J?Dn}@f806 zsPsj3iP-l401_$8eWD6s4@GJmUfF4^)6ylcc-y*tsNSgvo4oKwnqE;7z`nQ~)_Ucq z6JY1>OqioALVv7?XCL@7Z;^3Z?p<+nFQkeFarv?PntwX$*=N+(4`Dt4U=)AZiBq&H zyYw=EkD^bcm&*Nv}ELKCZr{ zo6Lpt{A1>!w4W?7Z=cW%UeNT|eKviOfuAa+NL{E1dV!_hV~9bInY$Y)o9X!|8+Orn zvB?D1)kO4}s4oEiN5d^%Tvd;Y89>n$3toR{sdUqz^4Rt<5MxF!>o1Xw+!Q0`$HNvS zMymnYA7gPP(lnZUm-8{Q8XR47lhkytqbTn#pE1k>ZVyl__fK#-#{*}^*OiWZG~~_o zJ@Ca@G+Kz&Zn&pxv*OV)RbKn(XLSy$hQOK@mK+)JcOJq@7iDO_8SelH7T81(CmsMxA&FjtB>q;~-hQ}9*M z;miYA zlpoAz^&c}}d6NssKd(ryw*ELf1LS{E9)H_VM!zEj(0Ld#R90*~A_w6S$*0CEuZ=&{ z^87$t$MrkH&+UB9kDL9`M$139FCJTKJ=v#K>GrP}Oh;jj)ScU|zK=rwS`yWrw0WfIK7A zW%(h$4>S*ur0IjqvmUK*eB$2>O#|7ZwOL+?ZGd%cs#*DZLxM$^hXR*X;$6OTRfVi< z7PoLQN20L0yvpBL3A|&4YZ%r-ujUz&Rq4Y_-a(&~scXyj;KT?iSLMMgbuxV`vJP1d z9&TD)0B{@U)J;)UcAo9yJ!%_PZmEI0rX`*31#Nkm!5s#EW#wAza|}CTfWc#Z&%{G2 zM9 z0nn%~ZhO}KK=q+|B*QtL8F|e`o#GRY6?_*OT7dbi{oFP<#Jq9z41J_r;xTWecpb+0 zmyVGYiAONlxUW%Bkcz%OXZ@*)zUlt}YJc3H_b2_y{{UKl)}QsK{b~OIT7TA`^{4$Q z{{UJmznXvCpY|oXzcSrlnQpANtX0~}b!EL-Z&qLb0F4m$JI8kZN{4yt-bi1$ciT1c z(%dk}@7N+K+(|j?QGVurEpPV%KBstp2hAU;gx%;rTELh7h?DAT~Omgeu1jiZ* z{;E?3;{yEXwd*5L@Sd44HE%S&5>8h6iK8%pTH?YXIo6)|6=?(DcRzVPcN6x*CHTATUIZ1hzGZ zLsf%nAEvbem2zQABA{D94rPnMTxGm&c(Hy1mODCF0_R09b5oefxy)I_w@JOzS@I9u zb-azt`(;Yze7XE3*0(D2>UTwnVHV>?_g(8xU@?EpFv-7MzGn{`F&%L(F)hcUlVZQ!#C*o5i-XD6ZFbR zGSIz1`Qna;$^|jM0;Hg_r3M1m=q;t)To| zMLHV>n%TrN1TxKk-X%SjR;Ugwj#=9PRJa)G$D~`LlBu^hn7V#lXd!)_8EqkjCQg@v z%)n?2eK0U)3SmI%mc1%Q5SvZmYX{;}kmxALs~+#>R}*NUHl2#RYBuSqsm6~-#twut 
ztXBedP-P2q#$g;c&7M%uw!6j}Xicf8-mnFG7VK((m{!xU*xKILQz5Rpt?7B8{^n6J zn?t3t;t6pJ6Ov!_IQc~hW*Sc^EmNAL-jcrMz(Z|6y9rvo;7?37o=4GtH4v_n)O3MW zwZ8>_GVhq+mt@tl(~cteXKRJt$fQ~(^N;+MLIior{wdMyrhIqR_>o~pS-q5eOYWFV zmmEKc3w%g`R@x5?*8c!BrR<~&E$+wZzo||c-S`-`=)K&q_C!%~Y)&JEG8qalH;VNK z0Lejl7v3rXx(Y9vA80hx zlVp0!D<=i$YbUApuR+54s?-f#){Duoxt= zz_qrwcqXWtFsV+WP`GnzTX8E7#9Kiu&M58+xpiMe0h`B6vm?c&=4qC|0m%@O>P0}? zVOi&)9FPMVg^67f{Y*T-R*nbBDKPd+?3@Ac%!m;(6Kx@Dk%>{J@1E-XGT~%y!0f!# zyfI4hm;0BDZY*L7zqq;UEv_Z5EO86D&S-)rc7NoWie$F%uesWYFK=~F!c)kn$z-$% ztI?j^R~LX&ipb7pwLU8=BBgoJQa1F} z({M&AwpW$qALd#M14Bvz1i^RFFMX6mYbxyZe-JX70Gp9Ug_eruG#j!4TcR)+#91|T z277dvf|LQGG&wy(%nBX6r&|0(?!5~{9pTa9po9x81vZa`JA$`!b@aR+Ew^Q~7~HU?HoiubDtB3izB# z@e((9mquk5zfs0mxy4jhw0#l6EkF4#H{jr`E3VmA49*GfBrFT1rRp>bxj)zWo40Lj z_zQ8TjUJStjtbJg!IGbLD1m^o!sFjA` zTUUFO>Z(M?&A?S{bXuJnr(R)H6`$3EeYYw=Hj=Fv#_`8UORnvqWf6PPDkvSW@?+uz z0Jn4_obKF4BbAhm{SACa06pjuljMoBEUC6v*y^rc!r&oSZjjU$1x?Xr&$(Q9Gvb}g-|U*4ZQyVxPTQR`V```uCb||r3mPP-J3av zF9qpGUd~N-gv3@SBsHT&@o4^`5*nER1*(X7Enh_Zf?N5-VioRN+NC3sEo!6crExEU zTG*{i82(w1-cIzju9PWfSdML#CH~!y0d5d-ulKOIxJ_e znQUgXwyn=7haZNo!D(2A3vCjy8<#mQsyFzgk1>&VI zbD-QuF*4kvP&&jNC2LTc9b?SwfAVoK?j?_R+(ghy?F|9~s`Gh!Lr<+L5}%@wxA7@J zLuf_G!dZEZJ*;vE4rI-{!$G7_{YEXij`=n?Z_X3^;!F5}>_bvCs*3htRK&+!9hgGRU`H5N>d zbWb%c_boLWR6pu*%BoLCW+A1jmrLDh0~Z`3^f$I7hP6nvV5dAmTH=DI&C4=!`#JQ3`$ zRK(jK8EY+ zW-B@r{{R>F2RYJxgkQvF>cjQX1sk-lycrYVY#y3DLjbMEOE1L3j2khN8}}4bKwFna zZ`7wo+LdU5f{!cCrl_ILELY@vi#bYtM<*tHONOS*zU?i`lBFJoBkvE)02BeMvR*M^ z-IPQPDzF_sARr4ELlixsJxw5}rFsc)%fMA_ZEWLSsE=TG3j5F`BknbH1;dQ18tDK~-v0pZ5+8vG8``TqSFB=9AFFFGA9y`Q8h%Ah4E9--D1Is>uZ z4SDv=Se8mo0C=^^<#!Y812!@vpFGjFZ&EI%Q7iLe8?0;1nIy5NWI!h@!Fp#>Jl>*(+ zFmJIe60kv>F;$}Uf4I~ZVmC`1=bW_(+KivX%!*x9aOevan;-WaRYGR_K4K}uQO4r- zimlwdxR;$`E#rvSEW?<&r6Dx(L5jEZzw%HUI$8_C_?6y;Z4}wfU3s{Zqi5{|5bL=- zGO0cX;hAVuCP10FOX$KbQX01`7F-2pE>d@r1H^05Kk5nzY-TOTfEJc>#oTgM!v}YX zih{?WYcjVrWVy+DF{Yg63}ZcYf)?8HW-JXuN(mi=$e5EudT2dY4cRH7lidefnXMy7!>7tDU`69HggGz(jRHjzt+nS+A8Ew~qShdl*imMq6lG+UNV3f{T)@ 
zL0%}PKBTm1plS2}0Amt3Sr?HIg4gpRs3uJ(9divp&|X3H0YV)|F4q&z3yYo4a*LWr zy5f~Fb7uTTo#HQ_{F_As7O*dUfI}1M$%UhyOSLk;e-(&04a^zQCtkFj|{!zK147U z!X3uX#2U4b#S-iC%0l-FHE z^|*#h7E4#bs8Xd}as?%NV#iAnCd(O6?#dsMC0!u^`bl9O(O&Y^MDqPCd@aiy;N)CQ ztsC~Rnsvkm&J_#PgoROf z5f|pTPbrhs&0X&ct&6XSwS`veAX=k%G-JF=UG*VF0*b$dE@jE3actjYXr|WZqj<{!MGD>*E{s9wP2IKIZ(91E(hqQo%hM{t zVmVCq62Lmi)n7(%Ce|X}slFf%7~X+&?Vrp-2>$@dbBWl#BdKcb0|Gms_-Bx8pN8da z7f;S9g>j$R+{3%dZ|V+O9hd(AlE7Llh}qgx-HVls4)Ndx+oVKR!xxK#88kjaYtu5Ph`nx5y4bGCJHkJ=3JGoabjHE4ZdU680u2-(An&Xm|0cWtZS#3 zb_M};#l7O3be1_J+#U{&$J|;haI`IlM9I>;%L#xQ*+u~K=SO%Pszla$>~J#{q?SU= z(trXX%P}~qmJn+3I{Tfx<&)st6eY*|$J7cv$Pb;EQ1A%12=wn1an&bC z8)6(?B9p^RSUfSl#f9bJFrNltcIT|j13XLj9fHXz*(#&fITc{6%jR&ZiYF1-Pp)B0 z6xkeT%nBBlD$C!@;wu*cta*)*g52LI<&;|jqT(zI2Z53}V%qX{KauVNzryidRr$!k zn~SY&06zDX*^CR6Rim`Z)pwLLQw?fl-Pq2p<4<0akQI$5!a3qH5GfXraFqr9a#uM@ z8+Tr#w-l}iR&U0{+~X+C9e@;CmnRhqLG?2x?aehjC4i4yE`Z zX>VfqLK35)D(}=LhM}YjD#Vm3a>49^wu4DHjYMZ82AoB98FEe42kKBM^r822^5i~B znZ3#SJjD-*MEO7RP^F;tO9vZZlI*wvYYcH1%wVd?ScTz2w<&wT3OgL$!})4nhG?Tj z@Ql<8&crW47f%B|7>n&oZh`GuCK9jQP|mPK5k)#bqos$!cd+7G29(sqEb?H zW3sF1Fc+977=_I5te}lH^+xIJ)J3=gfE7mR9OX7DD1@>3A+prE1M*NyH)|$%@BX5N z4wQnAhTvNxg7&Nz-J)e%I7dw)bjQIeLty};rpaiUH!8{&T2pfXyfNExaOkuZr={x2 zRInO*%%U0CL5C9!85mGiR{V9D>?fItJA`iLUJ(`>azrr8FbAc;5*HN|dM|q1uXn+f#X77fsF02Y z#qaKQH4v2*!-(%Q2NJ_5T8+4IJFLjhPk+!yGS^}R(vpySqsi_=PZ zXb#Zn)w!#--)%+9$3}6he6<4CylOqj3M-)Dqq)qRRa3oda^w+$i!!n7*r&0{h2>x5L**B%agKx|Gf<%etnv?!m1 zGee%r^YLGqL=sy=EjfK)Wi|0b-dJTwXxpT4wsTy}9W+47e5`}V=D~O_3er5hdNEz0 zq3L_pJ&9<9FddNm1OyBM0RqoL%!+Cp6TV9I8I>~x%8u5lP~}a6vAnUsXbqFMLH%V> zsd8L$bT0eFcMLXeJ1#t+{{W10GT`*j>RFu8s<*PNmB7U#Rd;yi0H6YbfLXfdkGP3) zrh=9=-x`_$@)Pw1*m-jTh`e#;E;Cngu9EtM%{5ZfANena*nn>B8bbQ9=|+1)KupG& z>qCQQ@6>0nH80oPpe1?((eW}jXx9D9s`CkRh!_@!Vi{0V(qcoes8WSTQi=<$U+Pn6 zMXT#CnHNVZsoS&^$nqE51!p|M;9IVuva^GTwbm!7LfC6?mst6F_S-D*4P~Tv#u3jZ zA_!|$B&sGV?pW?i7OF$=-^@z}Jo1C}9PKVs)x7WsV8f$K`FWP#xS>kBR*nG1M^wJDv{zj<1i5(iAtv$Mq8n1om@-V5-;pVPK$JjRHee!$UDMu 
zeO`{N$w)?)8hVnxYjVikupMFscITH<_?Hh?nNXg~j|inWo2x~+c6fvWoLqb)cKkRw zT`z;Y7L#nl>FeKdr>82iCAME)#Iq{*znkifwda(hWLru^4>w!-k8rFn!lG#D# zhT(_e&Y!7ijbWpT4tDli%mlfuQEzZEwxl#IAC@d)39)*sogMr!15Xmp$++iN&?|wE zTNvDRK|R3H;^?_ChocxJfDZD)EkcuRQ`q_$u>%t1OGCs92CyRahFncpG(#}dZZsXQ zh|px5pue~gbQ&D^%|e`jH=^0e=O?*-4FnX0{A)-p2&`KqE=T(@vtBbDh9-o;#M zdYg>PD*Eez=?g3@p;UK_nrx%0DlLVQhp!ic*nt346RAryyLo_;DPg39F)Gh-E)@w$ z)-HFCQS6Ac*Fv{s2d=ze{TN|g?_3JHdQ(B+0>f?e67=|Fl)487994(taq*PDB3e#T1bVO;PhccrEXl$bWOCePTLyR-mc#pIOT+_>- zh)OL1(tk(*SXxol{{SLcY+xu1#}c87TjY#3w|m(j0fk-mj%;+b)Sz5EoyEEOKk{_- z{mNZROTUP?3^yypA(RN2dJe=WOaLZ(N;qnc49B|*K*8hOg`1CNyU?J$?x)OcTF08ywbGP0C1CdqXEG$`9U z?sWU{hg1H|eJV*#GuW>wmFj!rh`IlO#(6qLd3id1m zIqZNJpLa)#nj9Pm$(S_C+=wipj&j@p=TWcoVWgKDw1BkGq zR0?TjSToiu7i$6QFaEf zSN{Mf`@P4A=_~+MiKiJRCTmsm4K(TcA*42?{LD&pe=`*@j*RxiHva_ zzF(=nZq3NtZK4p=Hcn$QHza@n0oZvaMr6g~aEPdYQOh~7TwR!iQ48hsGru!Cyu`L$ V5&Y8yH+zKCOzz4|&hzGf|JmN^ijDvP literal 0 HcmV?d00001 diff --git a/doc/tutorials/features2d/feature_homography/feature_homography.markdown b/doc/tutorials/features2d/feature_homography/feature_homography.markdown index ec7913c330..c4f0c00e55 100644 --- a/doc/tutorials/features2d/feature_homography/feature_homography.markdown +++ b/doc/tutorials/features2d/feature_homography/feature_homography.markdown @@ -9,125 +9,40 @@ In this tutorial you will learn how to: - Use the function @ref cv::findHomography to find the transform between matched keypoints. - Use the function @ref cv::perspectiveTransform to map the points. +\warning You need the OpenCV contrib modules to be able to use the SURF features +(alternatives are ORB, KAZE, ... features). + Theory ------ Code ---- -This tutorial code's is shown lines below. 
-@code{.cpp} -#include -#include -#include "opencv2/core.hpp" -#include "opencv2/imgproc.hpp" -#include "opencv2/features2d.hpp" -#include "opencv2/highgui.hpp" -#include "opencv2/calib3d.hpp" -#include "opencv2/xfeatures2d.hpp" - -using namespace cv; -using namespace cv::xfeatures2d; - -void readme(); - -/* @function main */ -int main( int argc, char** argv ) -{ - if( argc != 3 ) - { readme(); return -1; } - - Mat img_object = imread( argv[1], IMREAD_GRAYSCALE ); - Mat img_scene = imread( argv[2], IMREAD_GRAYSCALE ); - - if( !img_object.data || !img_scene.data ) - { std::cout<< " --(!) Error reading images " << std::endl; return -1; } - - //-- Step 1: Detect the keypoints and extract descriptors using SURF - int minHessian = 400; - - Ptr detector = SURF::create( minHessian ); - - std::vector keypoints_object, keypoints_scene; - Mat descriptors_object, descriptors_scene; - - detector->detectAndCompute( img_object, Mat(), keypoints_object, descriptors_object ); - detector->detectAndCompute( img_scene, Mat(), keypoints_scene, descriptors_scene ); - - //-- Step 2: Matching descriptor vectors using FLANN matcher - FlannBasedMatcher matcher; - std::vector< DMatch > matches; - matcher.match( descriptors_object, descriptors_scene, matches ); - - double max_dist = 0; double min_dist = 100; - - //-- Quick calculation of max and min distances between keypoints - for( int i = 0; i < descriptors_object.rows; i++ ) - { double dist = matches[i].distance; - if( dist < min_dist ) min_dist = dist; - if( dist > max_dist ) max_dist = dist; - } - - printf("-- Max dist : %f \n", max_dist ); - printf("-- Min dist : %f \n", min_dist ); - - //-- Draw only "good" matches (i.e. 
whose distance is less than 3*min_dist ) - std::vector< DMatch > good_matches; - - for( int i = 0; i < descriptors_object.rows; i++ ) - { if( matches[i].distance <= 3*min_dist ) - { good_matches.push_back( matches[i]); } - } - - Mat img_matches; - drawMatches( img_object, keypoints_object, img_scene, keypoints_scene, - good_matches, img_matches, Scalar::all(-1), Scalar::all(-1), - std::vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS ); - - //-- Localize the object - std::vector obj; - std::vector scene; - - for( size_t i = 0; i < good_matches.size(); i++ ) - { - //-- Get the keypoints from the good matches - obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt ); - scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt ); - } - - Mat H = findHomography( obj, scene, RANSAC ); - - //-- Get the corners from the image_1 ( the object to be "detected" ) - std::vector obj_corners(4); - obj_corners[0] = cvPoint(0,0); obj_corners[1] = cvPoint( img_object.cols, 0 ); - obj_corners[2] = cvPoint( img_object.cols, img_object.rows ); obj_corners[3] = cvPoint( 0, img_object.rows ); - std::vector scene_corners(4); - - perspectiveTransform( obj_corners, scene_corners, H); - - //-- Draw lines between the corners (the mapped object in the scene - image_2 ) - line( img_matches, scene_corners[0] + Point2f( img_object.cols, 0), scene_corners[1] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 ); - line( img_matches, scene_corners[1] + Point2f( img_object.cols, 0), scene_corners[2] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 ); - line( img_matches, scene_corners[2] + Point2f( img_object.cols, 0), scene_corners[3] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 ); - line( img_matches, scene_corners[3] + Point2f( img_object.cols, 0), scene_corners[0] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 ); +@add_toggle_cpp +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp) +@include samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp +@end_toggle - //-- Show detected matches - imshow( "Good Matches & Object detection", img_matches ); +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java) +@include samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java +@end_toggle - waitKey(0); - return 0; - } +@add_toggle_python +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py) +@include samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py +@end_toggle - /* @function readme */ - void readme() - { std::cout << " Usage: ./SURF_descriptor " << std::endl; } -@endcode Explanation ----------- Result ------ --# And here is the result for the detected object (highlighted in green) +- And here is the result for the detected object (highlighted in green). Note that since the homography is estimated with a RANSAC approach, + detected false matches will not impact the homography calculation. 
![](images/Feature_Homography_Result.jpg) diff --git a/doc/tutorials/features2d/feature_homography/images/Feature_Homography_Result.jpg b/doc/tutorials/features2d/feature_homography/images/Feature_Homography_Result.jpg index d043a5a6e952055f9364a9011ced8af0df68634f..35b1bb634c2e5d5b387e914eda2b238fc6650bb9 100644 GIT binary patch literal 90026 zcmbrlWmKF^vnV>aySux)y9S4WVQ_bM2@rz2y9OEDVIYJM+%336aEIWMkX+txuX|+e z_s?0oy4NGq(^b`7)mBx{-<7{V0N5&u?-T(rFfah+w*>gR4v+&N!XqFez#}3eAR-|l zBBNrXqN1Rn;$vc=V-w*M6BFSR5|UCdP?3_+krNV9vs2SCGO@6-kWg`Ob1-u=FtafK zg9HW%2?-S$6%Q2^kC~K^l==S|{`LcKkYRG+6yRWR0I)bPa5yl32LWUN01Uz#v;POc zz{0)pi}Y4V`c?qPg@a&a&cRgQlTO@$@;d7TSFB{qF*8k1C2XI^TtTZ|094oP+uz%eIVx6-2iVPEcla zW)1%x{r~zEt~BS6Mo>Z>YEUUQ8mox>jv-vy6C9die?#95tBssZ1KBM#j8wBv0pM2Q zF5p_LQ2(#0|NbC~;x)HTPY#O=;Fp2Nb|9WDQ^dfw4@X2Nr4GX&RphsyEknf13e}d! zxBre0Ps>2byrYC4GXX#N2_O%`{~}(&ozw1!$%o!6D!WoiN@IN{tu2kdmqpKBXdf0s z9XZc`(VL^AMMugl6E1DvSEh)}ZQrMuhz$?Gwx4|?z+4F{jsyrxB;|L-sWrkVG_?aji~%zj#==;;1CW`+kYO>1y~;*WA(p*2 zsRg?1gsKd*u$>~{OI1a4Ea8s>kucH@#HiHhu-bdks6blwER-Q^3*#yp=34l&%{U_5VaWV)d3-T3aS%QTXFBmP z%{*QzegdrpW>cFUG50PCz`ZnvKS4$w1`!X7Tv(?B$GXV?-@LzB-h38-0TfR-Mpj0_ ztO9Z$j!suC!$|A!MUX1{4(_5-hv5-Se~u^sAQGVWkFk_e&zAcog{1u&)(>X7ZA`{1 z5I@84goP_&Og@7-E96N>-$D$R{V#onQLSj5k2ZI0* zN+^Y$wQlh)&50txStW`Fd;$u8JSc0&2Eb$I#GHv*)j|VnS069as#)ORG7^50`)ymIQIb zxg=kW`IL}7l@2KOXPZo?bW5GntddEFuu057O^ycMvMn``C!=)D?v2jlq?>+ zTn~?%9@I4zC)2@MWsRoXk_^EzwTd}1yapejtwCNVexXWD4ZvVz5Qt4^wVr`1j_D$> zCUrKPpWM%yrfxMn$z(I>I*NCwaZ#LO60VXm+@P&H(y>0Nvq7)O@}k)woGjH6F74gv zk7Gt>!^uL9MyF%P#f8zWp>bb@dR`fh7#g!It?PEMf*F|cCqIVA?PkT561|(EE5Qfo z;KkB0iuHX0z+x~lvgaxCbT+K`)aL_vP;c)u4$H zzY`glwA98M{R|SwOkAbE~>B=3`B=lJ1mig9S=7NYSrxo zV;$F;tUB#eQJp4lPkvJBMm)qvuTz2y2+PKdSiwiY|KzbtYG(+51%%;cp+Wj%Yb+4z zKl0&xt1k?I#4l6N=#GEgIB;|zH!W~zHTZ0rF?U3h$R;!)EW)yP{R ziTsRTsui9cHB=r9T5QsmT5UZ?c(SaWtfOW*_DVSI1lr2@%LP2aF03~6_}WMq)H&+R zJBYD(nqre0;OZd<~zwF7*fABjZ*#jPtvi&D?H^pks%EyrI)=x#C(ra{FPm*qM9| z#?x+i(7{mwv|8QQPI#PciqxLaJ8m8`HY&+2$XJMi%IKS=33F)-D0XW47#5RQSwopW 
zqEwz6;87|s?Hz5c;;A4f4;21tC<6Ow4hMk1PGM1s#BPII7~m`o13=6|8<>J4UJ|)g z`=qE~^J-h?ml3bVI>#+?;@_md%#jpZ7O1tH=Vyu{j2K8sHHEI^KT@bVQw49%R9-rt~)bWD$jrCa?1fe zao&L1e}~z(N!@1z14!w^(QjtI=fX!>h%BV#Rfv3&Ws#p`AsH&I#wI-iX$jAHfluUN z05bB}E%szKK;d~xD;y$JTZcxUGu|_zi)@OiWm-=X!R34CvEu&gz#c+Foi$<2wz~NIzdqkBPy!^g)YlD$2ww{@B4ro9(9&S`s zHdhV-_L|2!VVIta5E(gUm@?Fx(IH1)a2&pp705RhgCSQ9hkT?|h|@-m{5IQhkTR|+ zht9)Rk;h`cD0HdEx|C#Bs13@)m2UWOeP1#WjD8pFm$Ed&s%>y2>@_zns=v&?mWKMQBMNoK1c#(fxG7h)i=I09e#FNot~=Le|`~cduToFh%9aQ=>Q)`%$-h(lkE>vCP)~?DL7TUKP zzZH0(fIsVTP~?v1GL+)tEhw;MpkT~X?bbXN zLru1VN!`diLHlr^0|tLR_Se;P!_G0~q;W$-me@*q;`W~6L%%>LtM7Lf?YaWvSL8Ee z${zjVP4Vos5EgH0$2%ew&CkHMtAs@?`$l9~m1f9@_v%G0jT-2QJ!}D2K_|)v(Nd2}< zN>TEJ&4f^Di8>0mZBFN$uPj%@1Tb?$7KO$(aS-m4IT$O@$5ZB@O?e7VYh6g=-V>w8JUPM;SCb2y_1aUU18+jfR?cGMxQz z2wMhukcodfK?d+Pi^=X(Ib}}30+6MJ-I1xokUh(7jMM`hrly*qkzC^$%beM19X*L0 zwn0O#G4=C-H}PWUH)cF_9ymbAyg^fayd8u6CI^ye{U9gGEi?OT*cY}L{ zc_}jWY*-qZ;`h+EJ$EUxu|T3ur!XLt8krv+yPjR5F)@>m)&KWdNOLRnV!2XqF>c;S zjh`*EZ^>>JgF{lUI(6RUTb^iu{nhbU#L|d{gHgbhSgCXAKFEb9i?K(2-8sF2?IOdc z)xEChLvh#0MDFoR9w?E_Owx_uXQydWn1PAz)KPm-{gFu2YJ*1vwRco(h4qsZpViij z3+&HmZfj%<@|DsU>InI;LYhz1VE|-UWK)4e-ez;a+o?mElzL(B?op=5fG{TRdH>vF zE@rltIZ~8pZE}iC{aqmkvm#$|OQaQ$humXFeyw3`1-Sao{uUYp?KIk~G`d~d@Y*3DD*yA5yCPR?r8L7xtPLM)6bbY_A zUPpbc;k~D-NDZ;!D}@cbq*%w4^IkQ&)KzjWR5?$H^;M^TR=;ek z`x#0Z>dn-c)Iv2ZpZD}dGZwEY5Kdx}d@XrtMsyu<@*!$BQ#axqGj7L?1)W8T;#Im# zjQBE$1r9Xs$DuNB0tCoDADI8&XIU28Uz$$e_Sto6v-GOdbiC=5?^-LY+ol4i3>{o} zorr$!rnBF5)}o&9+?m%p4IO7nR3#qRbbscX;wDG}lM2@6T2_fgyLlv~1!qqA519 zZt4+?n9hGzA~bXoy^aPXe-LQ$k%yR(9qrt`rn~gxt}diwr{pr?tYo98L`w#o%%%h0 zhC3{PXoln8=-V&TGYMjjS`ge%&*p-By`hHzq&O}iGY0F^exLwRTf1#Le!=5#hiJe4yX4%v+YhvjYSd%rtg0Io;K^go5cPu8YTnrq@|6uo@L2WE1S}2 zvVU8a!vLZ7Mc{uSj#%BU!heqOlYt(;%F17g|9y`-=Tc*#GB1B)nan$(@g9lV^(fVS zPxIE*aMw7t)}ris$k5KReSKCO2d=*qU*c6TZ=x9O_t^*q8uS}rGqy)g0L|P#)`hou zgr+pW{$FU4DOL@r-Qg%lGLnK~s&D@SxH|8ofZCGQKFY`MDmg4q5J@c zf2T{TP*y4dpU}o1lTu++@AdTqEcs27C5}V;)+@o|@$7nqONT>&PK;3F5dPpO~v?l 
zR83~x%Uxy6I{~j4@Ht>-#wKZ)e0qe;-*YO9C|)6dV;!>Y{VlNih3xRY%|BCZz);w0 zlh@O8Wgs_L%8K7(EtUAt!5p<*-kQnQ4Q7U<98TJSgm^Yw5uG24>t86sX`N!paVFtA zkg({+n3Eh8r)G}Ej?FL9u@6@`59B=vn-1h{k*~3Ig7a<(Uj;f&Y}YHP5bZQ=qwNBT zFv4(QOQcc5N25uR6diLJ4di7ILqp#TUF<~~|3V9~`PfcFMLfrWxNp$f`iMK=dW~H8 zhW9#{ojA@~HYJgt2O0-zO`8@5kK|p7|L$qNqZqofZgR-%t+T{nU?oC50)sg+6VgKa z;IZEb%|ZsodjEyYIhG86G!`8rj5NSus9>i?iyJX3Ucu^}0P<|Rnygh~JhOn-$)u>I!+pHg|{?9S|1Muqgo{yxY|GE93@sU14UWY8s5&)Ax0S07Djh>lILxC z-yJh=^0vzl{qNXe<_fj_znF1k4bgHd;-e0KzDe!FA7oFBW4gyCZHO$Wt{iu8pe;v~?rC5X<){CD&KxGK>wEqZorl5!?# zJ1jnRIrpy?MV_*1-1g;BsnW+9^{g?IxUntP$i_!dHZB(^8|APOmf0)1X$3>x# zxG(U%83%XAw%)97-aiHzuv3l`N~^0_;#i!ICwHsuXPL1NL^llOPHc`N`vzhZ(~zkl zR+;!Q{UWLTal$h2qI@&|f*){#+4N0OTFqm>qC_5mk}&UlK9X7xFcu0)*KM%PDi43rF=ZlEmci1`{J3AH9F+thm-g(H*A~_eR}3wSrjR-M@h_N-1TpxG-!LI7s)6Z= z#Y@FcG%{jRhDFMVJ^U?mPkPLtsO1vkoAFNReKYX+)(MJJ1~1&Y>*Mrb<=kM-d+cAYC8L3|WSg+4W0wNSjG z2v5)^1Aa;axBDmNawJv+3v@4T?Q$#SVG5R`e?bfYGr^1)8(@bH(~`R+F(>g&*ny0= zh)|F+66g|HVHU|hmqm)wsclkv+vOC_+NXFRhY!avf6^DBm5Hzon2upa=69GY`nNe9 zA+2{_`D73~VunvS{xBCDpytu$g6d5l5v~?KfrRW6%`WUux|gVPGEdHP92v$_Ze5B& z4nXiKV8g{=Ca(%dLBLKpnR)X^@L~gCVPRol;o%Ws|I04{0}FtI$H7Iw!^WqkA)w{q zrQ_z~7mz{JBBYmuZZCXG>`W^7&pJEn$7a?L65eCPaiaW)Y$a&kys;*HA-#|L8$jahL??1 z%Q||6;G0*zgR&~!g?*z#@t(hcIhEx}E1>c)1m9}KY^P9!a6$#G=-P1I(l4+<&pW%( zVc}%mdR~?!fen96c)jy~2y^Hgss?l`GcXI6LeU;2V)?OF<(EB@E@OqcMeCGa;&Le% zO9X;qTp_DdpP}u%-3`|q-U;V@9Nux~^@*8lP?u&}@UcPzz!Gik(sbE>scQtfMOMeX zX+b-J%wS~tFo7~ z80Je5;>l04l4JdukNcWo)-z%IGvPLx;RiDjHZu_iGm*BMZ#l|#VBX7a;I=Ylx=2nm zfc#{?)h>Oi2*?}Y+$=@{gr+R=@YboMCGi4d1x zYgU%dwAwlky(sp`3q>)nyBqY`hGf0{Eg^pOqWq%#;Hs5JGAL=&t1Z3#EkTVh=%-OL zJ-P8%$?Gok#om3J)7bR-r-VXw=DK(uXQL+EdNbWc(q6USIp1GB{hIMSzr}T|>+Kfs zc8M(VZ1D^jJ7O%z^KzJ~q!Uq}2IdqDUe;aKUD01Hi`@^ofB&<0OLi)u)EBb$rzk#T z2IlVD48XE&;N{7OYT;+OW7XEEWO_y7{>zs#f0WR!@q0a~;SaJVcmEO1_Wt2PfWr^TBtx;M!OE z*ZQMW&bGq6)F$3V=`hve36yz*FWI_zcBpKPF3au_f z)aozH#cNxWK9A?tICGNlj^|n|U3?;VP(5qoD4YP>b)i;!@PJh_KU@5nvnE5(C1RDl 
z{grHR&rX>y`P8HG?Hf}6RdaGZyK!7Y)sj3Y^wRRr(Li-E9CuW6u!x;5RaGVzn}pBI z^~{Q@cgH?cq7A)-(RxnoOtA1l2Ws+^#~)Ot?pS> z?EwLaMW~N@Ymmzh9m5mt0B~m#L%YB$|-ni#97*5jf_gqG!S*)={97-+=Tl*D2GHLS|v zoV})LUgDRoJ$$4~I*Ikrq@775qn5yveUa z6Dwm%2Ryv$&IuHr5;y20CU2uU-1V4AopktqS!(1jAU^p8axUV_#7}&b2dy=-nfv%9 zL{QHqu`rV7=J)Mh(1jI&5s7mTlU?Qgd!gd^w9FB`Zc)D*28@ z1$sCs(9)P%^QP6yZPi?e+hFZY(#|s0L)wBWg|-&f=a9V{tmymKoNfE?S-&N$3aQf( z8igd2TGIQT7=jg5}kTVJ;@#X8gT;y}mmrip_h~qhUWw$ObgY@RoBi*XO ziuG2huG|{E0Rooo!geO=UTcI8F{Ka>k*avuQ!jRp6Dc08@y6P%2OM;q&Yf9v*JJs4 zwdxfbW~)XC)N2s9E<%FOr%CHgjUVAm;txG$&l~a)?1ZzM|7;w&iL8T|g%>ZzkM`I% z(>pfx7M*NCm4W?owJIwbH+AhZzO3K%J7an+ClBBk&0O>^8}I)DdbAUN^vFq7HOc%S zNNCTBWW^R>WA3{4;srm^-sE)tv8KxO%3sHJ?>s#KK`ol|M5B$WnVqXdSAn3lN4F1p zzVE%S8$k|>825N6e(BEy8j-)vcAleNZN^(O{C}Wm{raQZu>alhhG8KDZ*MJr^*P+ICO6LXdn-%U>S`;=)=xTK0|ay=SR{&xsM57r z0+hh+TJu&C;=LQ|1gHrPVUy(PUx3*SRUe(^iEi7uuz3vQDPC9Al1O$iI$kyvx*x%Y z;~LgrA(epPnTPP_ehj`VqfF-CY-|DsCK38^a68*F6(g`?jt?;}f-!e{TTc8N9F$3= zAwc+(vTG^Y>hEuQ@AYm>s!oplI1|o<%GK3}AbVmUAbP*(6h+A7VG$}4uAWk&?TA7^ z*_uAa_U%-DWPi#gTn_4ZkDI|8)cWn1{al3DwNYL2k7}W>)lza!0s`@VRCR<&t;sqa z{u!C))=HjF+_nImul%DKkcF@5TAL70tMEe3Cz0yPOrgi2cvkWcf#Kprri)9oY3@Q? 
zhb~ieYHO5)4>+OX9xfIgp6e#pUX}1=tMa2n%A)BCso^HJRI-i+-lOGHgo z3^G8##s&Wm_}@Gt6VAc@R50xENQphpR>w;Rf>0h7V8UO3E9M7R-+JZ+TK)9*Ya#Ut zVuJPV`^|AN6E@6FV`Xha9a1;J0${B@>v(a`b>BW7B=Qe$gTZW%!lz&av^2V!^@#r% zrtx!lxLOe?eBzTmHAJwi%4llko&h#?JtQIe6_C9p=;O`haZ@3!Tb;hm5G9JqE?_y~ zykEPqX~xXjWgcN|vdX_{HQt5onE5pe;)XMQI{Yx4-Pus%D){+VbJgx62#&3^cbD*k zdaH|-$CQI zk-2`i1lOaQmm`?&N~{tz-DI&>JGUg5IRhzHQG1vrKN|^wl}*Yt#PGa~@|MGcu|6sN z0%4KW-90>k=CQb$t&KnW;>`pct8D$3L+pTNW5PwK54P zHhGH0rW)znzFt%oJSxItpN)*5;dSp6ZFnpXylrMf)ZfHJLVgV;E|{okIQUW691kYi zFplU?h%1GSs7fTzalj;KQOBf|Ljt?qA_r4Y2n4|{}2Qf ztRN`bB|`LlM5cafTu^kSNv7LVmyhBjEIN167MC;?Zp$}Z+;u5A%Mb|i_jPU%hh6h&Yj0wLmLo7l>R{t;?hyoHcLW{x#CJjH}bp%D{&$Gci5G!@iXY z_IjvR!{fNxTnD*T!Bf%~dZy6n;==i~YLp-0$im|^UiTbIyKEA72mXXNa>DM58rB7; zbFFt~4b2j%S#hxWd1yCoA@{>l!5Ts zepRLPvcxD%V{JX4mJUb7h{xlXd6h-3mdtpk(ip|1jG}?Isk$3s_q1(GI|b=+3n{xa zWpB;G*FaO}I-M@Sur5#aKI*2tQFsze3F*%xvX6;CWRi?qP+|8toZo3#-fFTAL0?@m zopDG8iUj9ohsG$pRm)K6Q6RHn?TkK&0Lb*i>7hLmpN_zk2?2!h&rmk@ltRA*J%ivk ziO;M3lX_#uaa-cV`X=D^FZKgI^*1hphWhK6sdkmzH=~ru!j*4LP5tfs7jSi~1&8E2 zT(QPFG;)$Kp1dxv!H=qoZ)9)M#e!uZQG_FOM3n z_g2>!wYR;m`!hbN;<8&9r?R(cYs>rQiyvtj0}1%Z&R6MvUL|HPtgF)-GZL)d;5L>g z;X-tZ5e@4P5HATZdIsN?e;||k^}=DQaZ@q9zFmLtOJ&0n_KWAx#a0>izprrzYRH5&QQOx9Y4})2)*3l9zwmti zd~2`{DvpW2QQS}WStaep9G+%!&DyU+g@lLGzgOvBgR(bkdXSPXk_n8Bo|lo=I2g=d z9Y#}jWwQ~kna08D?c)qfX^K_LHXQD0ua8m){;tKIj~45U5>$^RTx?dAofmG|Jmm=z zb^raz>FLIInpqI4~WtIuvdd-ltv#H|OjM(Coxg>`(#4=Aw_}fFLiIRK?qy!w?`X8`Jqo*MHF)6Ks3|QC4gwDXXCy~sl)Kl4YG2$}g#v+EFTbiOh&%#qg-0QF#2OI3P>niRgioUq_)i+rTepYN4 z%Ks?^wsGq386PU-dpbTD>$3G-^Iday=&M93aL#M-X#XcUuGUsGrj)S%-wV%W;$?D7Js@^sP3o^DTt>R}X*U)#a218IeSWU)MHtd>O~lnYANNHKTj- zsx(@cs4JMui8b8kWeX{>`F=EZHqbJpVqak?g~Ui0d*!C~T3w(^UT}JnD?(_kicf7W zFIOe`jU7&L2m%R3vvC@qnHb9HHa!oN6mx#Mok^Zaj=er8rf3NAt-fBf8(}FXD-Cj^ z4056jvLX$#BmL+4mdyzN$wvRl`nW;*gh9anWKH~;7>kkd{(!AP94|mP&yT5wfET7g zo?Tnr+1px!Yj$O~zApp>$#F&o8W7dN9)gk)!kWEqZldH7Jx_yeZUGN_t;AU44&?D` zA7AyP$|(|4OJaYIR<_WJ?^HeGLPRaS^}NjFKWA$R4$7w|cIB|uf8N^H_nU(|39tqd 
zZGVI)UtEmVo!pz~_Rz49h_o9Kc1x_H#s;oN2It8&BXvNMvi|~r=x%g3*HYg(9_mQc z#TMH3l4f!H$hxtthvc`#r#PU_6@01j)*>R{>%dx-n`3TNcIJ-IB4chql3KGHPp%<@ z%9tyu(ts6dy7r|h?GaQZiz5qLi(ygR=68@Mb0r(o=Nauf_)8TYNZWkhnP7pI$K}Jj zoxhC4$XVE{fl1S03qu$B8od=l=VJTH>Pq&pljtudFL)hhcCWbAq||KlRo$@$ke;GRlrrEJ!+JF>H?AAund`?*`ANa1*~T z(*2CTnY3l)l=Oq*5k+6FjI`lc&BG-4&-kiOE?2ykZcoBcYoWkd05-)0X09gD=zIBY zF`Psn5bj1k>#&uce~^Yx@P_)im4^t{++;{`O7m$mqgM#-0JmcG@2^t#60ELN0c*<4 z^vNPxx9928dmDnIVllVUBSVT&m2I1fK8#=6arkN&TtLVT4KDMbUnDi`wQ&rVE!C>v zxCb`*di3Xe^NkuPOdFz{z{UI(TL4}M^Gf1<&DP>7n~8V*5_ti+uhY=GdgQI^XAcR< zhIMT>_V*Y33E_y+*^C@Ff6_y3wiz-o`Tv+wy%ZHR%ooNI{;>Sgc5*U(|3YQIa<@VT z*;D=E5+`-O%v0|~TJXiO=UXy+Ll-0`ZPGAhS)m@)x~xIDE&#&ZsrTie7JsCnU4CSx zJp#PMohAOPrx14BcB&IT`4-H(hbC*~C?rNkSakadxocr%)zf6NV{7+o&KBC*gu?7E zXbb2Zh3cBJ?>K4h=F7KIc#f zKPG(4^o9L3?9N~;8*DKf1)nXry66+wK^mf~gjv@`>!W)#?=gHM#fSB(sbLCiw~fic zH%Pa!hk}abBzDtL_BH7+T>`HTtbs4@2w%+A&=XsuN`Eh^ZPt-m_=|c2Md)<%z5=K)waQ^Y!fZ3Pp$~> z`9V7IV+xO#;FQ6dKi*IyFtJ0^D+oNBttnmT13c7|6q7ky9& zj5JOcKvzo{G`8dC=2aqCOH_a|nDG@j`#-hihM={)BJAY#3cY_FMImwO|Lv&0ls=3L zcyMyrnJ!iN;)#qO&%q(=K&Eb|I!cj@4*3gcHowCWX1`#+&{!>?blAyJ`f%zF<#saS zES?&@fMe%tweX=0o1YMyUGfN8C6>9U^-Ki8SkV7)bF8y&7LW4Obg=~f8V zm?f(Ig;@rwov6>*9O|l|T$eNKj{=o*cl~+j%St*hp~s|{>iYBMm|G53NGk(pfx#4C zo8M~$7+=)PMb+DYuzM%_o)WQ^Z~S=v(bw}ho@oi6B(_T?KVa5xl%Kex;agX8r%FCl zRj7doKKT^iOYrbLM~5b!IiN+7^Z1bttMGZG_!bg_jU0;#LA$obQRWk0F*#ga~YGK&QPQs4(iJYj#nImzZx+CRDGQPdAOC zepde1cv~3!dji5!jw0l)^y~Q=$%>y1w1-+T>e{+u<$gBnM8^&DY>BA*TWSYS=x~(L z5xrX#7t^3=GU6O!hS*I>eJl9Wo65dTx?x)T zDleDgHhXe{`5Li(kP7*z&HhxD$;Z9K1{vu_KnVi|#77;1Ycv!cmw0#yck(Y>h}?r( z$JVlQ^*9xLmliwoBAf%c=Sk3lvdBXYz$^F5@qQYJ624a5Bth(k1Hik?4P610pjIj+ z{8JRibg<-@g^Y>Ku1;Q0he3azbGV6v&YK#J@PAlJyx{BO_p6VZn(5=~kSUBPGT0eu zlSTe{EOi=4|L5@sI;Kr7beXDpW$5c(ftb$e*N&{_tA-!J6?}dSgpTh?7Mh|Ie5*l} zNN2mbMD!cStFT{3be$HK4aGpViUe5PdPGc&+1R~SrX-}LpMv_{0UHRlWqj)eILCIl z-O*$&NU=1`j;YW>%mi;6#J$dO$vdLM`szVr=ZqFRmP*E2AZu;& z2}DjIm=tv%6p@SxiQvM7D04qKay3uhVnko;%$+k))l&5jPqDFoX29nW(t1GUdOikj 
z6hOr4>m64wMFdFC-M9^81(r;oz-XHBGCzVmyN`KBwMJ6j)a?G$`@_8i<(r351$MGz zjex?e)h4?Mk6|XWeyx3COo%IrQ%*aOciY%PGzt4NH+A?oj%xUe_T8|6VarPg=CDPfd>29b8?G1A#5S zDGcz&Gh2yTpGwu@sOHt$sO38~2RL%I8R=>YxPCq~b?N!@jX*v4>3%U;GxUY3GW5>u zh5m`kA*Fjwg}XqrEVW?rNR(y!rQkhjOKt;$!Jm`N&dmdayzCpLvT(#_1FTRpH#CX7 z)~k;h3w#t$!L~~3HG7%!p3LF0gq$931X9~1Bvg>f9$mAdnt^d{lB|v70bgx)pDj$j zgH0E~IrrAahV#cuD?zE5x$kJvG0FEoCF9>5Uv>j$ByFzh2E!hS#uu{bA0sp(q!yNU zB`U~FZ1NrgAK4pQ;sRPI^5$H?q8>ln-m6i!*K;%y#EV9&s6wCy+Os(X`{k!X#XR#= z1`>K;h;=P`mfriqq?ss&+&iGAb{X~T{OQQp91C>!7U*`II9ROKI--i+rNr&6Z5zugr6fL}jUng5im{R>c>sd?SIkWvlly9f99J-&W0dph=d z3)qmu0sQBKQDhW&7+6HOx2Ti1k4E8eXy9@2co2AHv@NjlX=Oc9OMX(*@oDLpe{7y5 z;O^VGmX<@L_e@DEpKX%fRet#P!XLzx}CTCV&KEBv04yMek8!`Dhsl_N)8 z-s!9MWJ9z1+LvB}#h1#{jJs3*N^PHhSLwP6|4Oaj(TK~q*eTleSc&z&L|#Nfme$IE zk{XiHm>N_MR>d^H=TN131*}rtxMKE9+2V<>tCa##52uv(aYhYzrpu?riYhfhX?dwD zl|5UB46YIL{+h22yfX8z(Xv)GvYj|zDuQcCFF&U_h1W=scxtAplxT!paoJDSsgJvf z5iFCB50HU~S3)z9Ugj7H2J%i2fuK4Oyf6A;Lfv>JJu~~2iC#7p4JDeD;LK>qs-s?^xP(DZ@|`P?)$0+)oup2t z+0d z>^AQZoDE#MmKF2lzoRdIJ%+>$}%zTHc95)k>kKtI4LjX6&8&t5_dz} zK5z$@9)uQ~%eI8cM$qlZeu`ty@^jin-C8+--7SXN@H}&KyymCAA8VwNwmD3dbtO(` zY{jreW6g%Yli4$MXYs(i3%vSUn;skaIE{<@ zjH);_&msu(Y*+!ZWQsE>AJba5+`rzcWtN2DHFVcvl0l6Vh&t$v1IJy?H8?X_^KlwV zO1%-gEe36?uk=Tk-8`+`?J2gBaW>+K_boa*sUHQ96MY?jtY}-H-le#UCq*rvAw(UX zigjCde%TSEGph;k1cx&O6@+=yzGPSolG)}Rd!9TWu;)zD*(cXw%@Tj#yr6m1 zMwFGDrti{@CP#0?BvAO7)Rfk?+D!JH@5iUUWK?UuvaT0daa!v@rmh$1&yJbzcOAWG zzOo|;t!9Q$FFVg04@g`4vEWOKMEdP8i(G)~eVs5W8GNSnI~v$E- z9F9B`nvF&VUKHt++HiFoMk@)g@&?L>2#)K)i9VDO#CCe`rdApwkPY?=5z41k&K39V z)|w#=^x@$(BJ?#Cg$NA|i)SQ5f_&<{Qncwb0`UeSa!F5t5U-rp!hMctrr6pgzas@q zvh{Py0?l9F`_4;#Dx_e_ceGb8*{8x+U2nfuN)PbjxfMxe22U+hY)2-nYHz*Aj_ujU#W$Y6b*CxApx(RHIs3v{sLI>KOf?`)5bQ{2_*vg_pT$*p=#+l z6#9ZeT5-P{HN$K7@;HmklcrOCS15R_zHj+P^63}{y@h27+i}R3UzFV}ff}Y3HJ7CB zW}h~o4j{l>Nzl27e9q6mpH_75$l=T+ehzvs?xf9SQKgXeWLy4N;y8!I@)xl8%C%&5 zNf}b*(8}66gdu>ahajY~8k5Vb)?u*4NGUm%YJjITvz6o>hJ(H@n?F6|Gs4>xwLpxh 
z%vY9&#lg@ZGbKVm|73yId#4?AzZONMJ0>t2Rh>8dv=Uw-rZ>JfTr=neqSJ6l>N+2z zi2c5fJ$3PR+F5vFlC|>mpePNY`-tehjjpKZJ2n7ORK8i|WL5g0n<}-9AIS=)_^SIZP{sWp&eh92(z$qU7Xgeotij z^cNtB@Ncv97R!cS8%csfJyGKn!V zvlvoo^Yj(m$6{`?}b}aOpd_@@AAN>=7pc3%(i|W*zY*TXb8x$ha$RrIk)q>7uL}&8-L;C9;5!pijO1$xxJ~G{Jqc}u^(YN|+#JJb z4*1zY%hixU}C;W6%397zQ$KH6ThL)<26=y{Y74V7tE-77j| z(2t)|l^A=^<|$iOOt)4f6FGT;SjusivJ(ASNczKTn0YD?X-n87`KN?5mP{Ydeu7U;%gzB%4L#2SqG6EBO;(58Tcf%jED5yhSfh* z-R_n{#!INsl!EahSd7#6+te%LOR?7)_>hQxxn1V2zHjum}&{Gx1c z0Gl8ldt4M3?NLr>$;eEHuEnO!g`*uaIEBwiX;j#gIi|(Qe6LJ}V{OqU`P)9ekBqla zxZ`^m)Ac+OKWTjUeFRKMsqWbMYk)jiOM*AwLD#y>;2hGIt35ORi0dZcuOiQI&f&sc^#3T)=1<)&U`m1DGa3{>FHH7+jYkh7> zXIs1iM0imm#$cVzN)_sG&r#}m((ax|`QMLt0v34lB4V=LJTL~~;`KfE&UDLJR^bzl zs4bEW;{Ij%znq6l9t>oI=aXLuKA`CJ&d}~Fz}tV12-&!EZ@@-B406}%Z`N~ugk^?b zk^7gy(D-E(L0H~!Sjr}h377z$%RURb&R*p31KF;sR)0XF1!oykR9f!?&=Uu-F#XdYxta zfHL%~^d8NN00u8KEEhc~8%JbDHa~jjzuDxj)NsTLOuGms7nF{0weffaG z(7s~pbK1(uMxOUc`y`|>7&8pc2{vSd*-{0Y_JsSH;rwNv3j<4q>c~bgos&wqYTpRQ z!4Yv#T2x0eQz_h=3HO8^#Qd_7fjQ`yNcXvG46hg($^2bDsqu(0R;%r#ta2gKZ%{^^ zW#PjG$G}hd06(t0f#(-H%2Y0Z7kZW?)vP*_SJ0d{w2!NRhNEL}DKlYZ0V-N<>D4xz z3I@{Z=1MQ4xY0Y7ko%v=Cr&PU6xRDxGoN8N65SVT4#db(*;+3+UIxplGWh%}OCPTw z>oXx5e9SXIBZ<9`s+n^&@1;Vx&joJ+(sh3v0}I>h@&H-FNf%4gtSoi>A-THjndf3u z|HEBg^QFDP?rE!)oj|FOG4VN?(=7AIwGfC#T%U{WNi{6L)OH=rBkxlhHj%39iYkYe zm0i16rs>n=pjl?MaFJM?PZP8^KuWsVEWmCJo5)7ZOrF}}oRpQL_m=rwAW-QHL)nI1 zt60+&?x!zpQ?o{}Qo|{CpLRwR5|>-vN6)%09Dtfpecbk_zf3>;Pt4q##_D_A6+FMWN&T+6g&-Q& zCmj~7?#F$G@_z?IGE#EEFDyACfVDSPd$NYmleLccnr+E^O^Q~N5Ug?RDioS;N}oKXcGjL$i7sh(Tf%jT3~L|ul(!W;e)9>F9I_*N(+`*oe%KU|MUKdZZ{?;0$DmGC%ocdHu}j7!i{ z-S6uOv>ciq#2DBCsC_krM^*gvgIwqhhBgqB zbI(;WIFgs%s}8hSsjE}Ec|JEjB2tEQg|w1;Cuk56he?o;7=435lFbXBOUx#+!6Z0= zyzgKoCgojR2b6?frZN! 
zW1g7wrIlL1QJK28OiKwed3N!>AUKUy_Urupx7)1F zZ8{G3%D1Dsi9n0wx;8Qg!S7*WOfZRsPgi^oI z@hj@CdzC3(OB7_fLrpSSuS@00DzL1oq;cqESKZ!`w`;alT)KE-u9U)lyn0c}C_r{5 z`_98-jO%}X`jIbossh27zCyB@{Rd=1ghzyffrf*CO#cZA!u~|X0f)mW4o|IW;+$}a zg)3p4`yY@A0x_{ms&(Og-2#~Ygg{JJ{{@&}{sS<@X|t*_hP|haoGzoiFEG7<5}KCw z0um6bNpK<=z~YXLtCF=YBaPOPGZOYo7-fX|@R^!dDWl|%foGWjr*DyhBkPo-;mbXP z(x+ipblvlrV{LSlLWMx>Vzi5%gw8YO(dT| zk#csgTAeYr4c}wkrn$-Sr?R?6Fttr#c`WrJ83d3FexsRm8$G%{XYg_ua;$A2a!Dy% z-b1G;Jvfz*QQ_h8QTl+w_AP4Lex(Um$C8n~F)x_womxO{dfV(gdP5y8XcHaeopyBW zXJe;vN*x@jsowau9Cry;JO#AuC;i|DAxZ*1bug){}f5TF4qj{_B- z)+S#tc6pJ`(Joq_Oc7Sk)~jS4Z%OL;FT#TlC}SGlKnJ~S01UdWX(2a`uT0?Nti|lR zA?Zj^aWbMLlsR9<3o3GCh7AiJ-zG;%?J?DNV>s8452!)qGNr`>cD>+{>Ul&26C4g$ z1<2ewOWg!U01k(<348IU6*c4qdR;;K> z&q`h=S+>;B3kuFS?#D|aHDcn|QDT34#z4#_lHk6j0W@zko=?42<`WzoNSe>1_*-1q z#G~@lz1AGSk&*k+0&r=cTSn)qsY8zwp?YNvQ>!_Lnx9KdT|;F{+z~w8w-$d_m>6Bd z)IBe7V<2&**Sibxbdr*TYmz+2Jh3Zu5>{B3+cb-x0ofXX+U=uL=y4opS_o zZQl;~!k>G@G2QIbS}cjvlw8tAVqKBA;OkRUZDH9MT*qO(y!@9;SkCaY%|*}Ja`Ts? zw`%~-d|Mq@h7KAZP-y)!%527PM|^DK4zo7Xw*$WGE5VW)6Jqfi!TF0adS(UtQQw&x*;_ zdKK{Vu-b)0ZYZ>^Hg`B|L-6OBriIJRBIm)^=T9`~ z`xrx3u&?a1jY_PcJi@xNOB&6=tFy|8H$hfaD*u`LpB$Q3c`I(xt^V|g7{$?(*HI)d zFUhZDn9niN)0UiE9!rAVh@pAwldAqhKhbsZ^LSDRx~WRvY9FFhQyARrT{1wW2Y6}R z8G8Y}I4&fYV>=86{?ahdK5T3h)+Td^<7#WoCteZFj%OL84r$o%8iFGXaZ;jrH*1$s z0Mi;H8&Gb?biiO`()4teE)Q4XWSNch-iv>=zSA2WfyuLq2m0`&oyQ}n5TRz=6zb@J zxWabV;%@FJwT7kcfe|M5;5^TVA`Lli_;7@O%Hj$QbF9+iX zRB@DMw$F%Pfc3>OsGQ?5;H|x=$!^@e=Hh3|m}^csT&z~cPVRGe{rC@E-qF^ellz#u z=8SSd@wGqwJvtF8ZY@~soHq=PrnBbLrZU?&?*3@oZ2$qKV==;vXroX^BpRcMrS4A0 z)cXsxb#j+OFGZ+Blfs@wpq9YjdJ+-o?Gk3tKaN5KTStS|uXj+#gr{eHct7#kEBIz* zmV&H&58A-2BypJAtV{K*QlVFaT}?C7rAZO0HnAXtIOA{Amndc3j<*?efCWNJ@?dUv zm1teE>{o=V;_6vN-f8!i1K7@;C9gkCb>41vdkOW!JiPjwm2P(X#E#we9OrOB=0DZtCq3je|1oTgx1v={gQJz?f}k~ zhadjGT{cz%AlKBpvOEm$EMm;tVA?IK#b7*?6OOP2fnWZlAiLuMg~uPJvlR+oN}jn% z^w_p%-ZTL#)}rzdh+_X;p-{v^i9DIx=edrJ!%>=2eatG4DuyH>qgC zkRADe!g*J#G7c*6(Zu*uFiHrf`Wm&)x*%PLj-S_o+_4`HnV)nG?Eg^^Maina94t&s 
z#%X8E!qws8jPlXN>wP@OdUQ{6LS0I+o$a$1*L{PUZ+&d9y2M*xe^ zh!Ce~%JF`A@98(l%T~&nCXTo&!M0mP_q<(^pykEF>(FA$_aWy9pxBlAo@Jw`bxc`h zl#02P@Em{*d^V$PeLQ%PqJJv-fGXE?3|{9f>Tk@5S6Qof){+mr9vOD%alMHX|J`b8 z05|-l+W<6_60O>(H#W~=5X@(ZoVxqpa#JI5bT+HLOg0pa<60Rj8y zMHb6~6^@3S>A1rg@1eZDw&-vUID8rJKRMGrkoK;qZz@N>agZL zbel2n>>cHqA*dlDbCYBZN0_2=bX&58V}YO7c=}o+Jyv*_;3E;uL%!fEfUI2X3r?X% zi5YiY@6qttT-MAFdKq zx)|o~-@dtQ;5olxwDA?^P}wjR|T*67;>PfQFvY&^i9&z1~)moF*K2kGc;l^ zQ?r;8E9r6JO_5>nX>7)2qUZVSA;VFt>Hc+42pw%b+pF_rX^(^9fIVmloe=m#v;t%N zB$w+#)^|Ojw_3e$b+3)q)DaEOq;okO3vH?bxomt4m9976(~=L@#wp=NvUXN~;0B6a zM0(n1h*Scle%Tn1Z=cOOX8jO#e3|5%Weifh!SGvwqnE> zY6q0pQfP)*q90$LMsklFp`(=Lrk0{htylH3;3YIGj+;`WOx8AG{lc;+oWAps^c<<` zO}i`I3IhgNPg_@5r|vw8ujn68o?S=BVzVUTN-j}GxJSBON#B*Ce!wJV?3*kl z-yfh8m*lf<5VmWHciVpg0fW$`ZMUOR5UUu#VubC`jHtPB(xJfQEgBI zizg(=a^s~tRu!k@FZa}{+EXoNI&pKKR56VLx3w9mCP(*~#ML{s^rV&v&3zub8Tfim zZr^E6+k`y4&y&6{EPH&*p&!vrA?{l~OWHo8t9{4PC<16(0X%`i0wqa&nR{nh6L{VWV;N`vbtv#|~>{e{uqp*;8o0lCwb=K(rXJLialn+f@NnXVZhP}f-JMiG{+ zlftw!YmBexo@%Yfb)E=KSmZsh*c8_E1r7Cw4VcY|>n$DUYc1`gMv3-@hyJ8)$e*A&ZJA ztF#UzIZH=??k~_b@|c!Gd1bM#{;M~^(+*#AtNe3G-KZP2FZ$?V$RM9TM;D zFf5pFvD>HYd^&u#yp8^IE1=@hfgD%&<#`01#HlIs!9_-Qj7E^mHC3kA{1{z&@3#{5 zEb^c3A`W;p1XpajQ@1$7W{rhC)_60u>Ov7i0GJ1*=-WRBrwaTz;o6MS095R|jL=>2 z=dUZ$9Jx)I8)^dlHFtwuaWD2>YGpy`A5b6$P0)Aqgp5E>B+d#%K;$H#yV@KHQG1rz zAwjapcRkVRSEgZe$D$*6#pUenpDOKAS}pqfOw`@W*M$7__G4Eg=3Kr;KO>`Z?j!ZF zkhrROP7Nglsx=0CMYIyl4E-2$aLPTW5r3Adl}eMz%UXU%#FO+N_HMIV)kk8Vh8~2eq{5RO%>MJ}!1q`kTKJOFnM(ubWa}T&7b5xqa`@$USUr4-d^=ZaFw0uw8y^`QL{Vf=Z7_{= z14OGx-&#*=;2ojSWiCn=v%H(xP-ZG5guD~?JjT>cIror&ipk(XZfX!HQJUx4E{_!+ zMZkBcd<}Oz<&S(yZy#N;8JUi56sEN%GVvHu6J75{_A^j7KA=)IASjPC78Eor3=9GS zA_6=t9K<2$Kb}O`uvDKou&5z^M9vVb$K_W71mTfTGtFgBtm&HC!sY%Rc=a(Ql zZ)3As0Q~>X27@35lUO5X1>nyBOKN0dq7yig;tEM|R60LN6;lQcsOS*?4*3NAD=j*i zp5LQC+==p>o+D7-56Mh_r&BpbMqo8KkC5}zcY6+2A?_dq3|UE1nX>EeT;J{7B*p2R zq&VteR6d~KN(HPOFkxHxg95KA);gz{NL1Oi<`DH#85Ht{k*>dHqVT>qNJWn!wsO(NV<8y!m6_;l= 
zCR&Hh4>lBg)L>N2V-hOw!Em4_O`^}6@i3h|H9E|R$lWXdQXwRNh+f&HELreV;_ylZ z_POf=$`LA<$S~fFgEWKi14=`>AWMfWP+AxUj~8;iv8;&k-#68GUS=uuF4BWzL^~!v zRw;^@Gu^x~z$I4Sm~Rf;X3d_ePWXW8w0X2@RM0MCzO|?Vsh&H!bq~ZmzB9D;g>P^0 zX=iR5v?o=mp6{ER{0`$^42S#aN-~e5y4MyJZ=1b675@cOPi@JTgqc5Uz++iGlPrklr5BH$JiL53CuD#kF>1K!$Q} ze0d%5jeIxH(Rl3;#o#23(-SsJy}`;pN9r~n-^k&jIiJHJ(O7TSsq_gMbIT(O zO-Qp}H-nBIk{!fn5Z^{K1n99&$_^cos_gMiG5IIAVKc@}3i+T=z42wyG4Ygd2uQNq zhJGzU482>YQ^@CK5c(d{IXGd%60KRQa#b}!rB9Ytxv(JFDFZJ?cki3l-ds{C&S*iO>k6u_r3BBMd`iD zf179jvL*HZG+-dbZX4T68DwCqmN{f}dCmjM!~2%u!x~e)Z{%6!qHbljd`G|=)wKTr z($y_lVG2z_WtkH;u(a5Y}ciue@ zL2ba458oSsY0;E9U`Sv6+VBHvO{e3$a^~NbL6hG!IQON%$LRhK>5Q2lFJqO@HA;ui znQWn3PIaSygf*XlwbkkxG=?st%k<4r77y8=x4GuUyDr1Sz{;)4Yc9FX{>ZXXdfjTF zs!k%e_CbwF0x2#>NbfVorTpP=Xd)$z-_BVvdrv##cmUuh zcq8z8{E=wp>1_`q$*CZwn-#G^KX?1p?ReT_glL*Zz_mZ*Nnpx;VC>pR?E^P3la~c> zbnEyUiMd@lwVg@5qqK%*xBWIiQ!P3>#K zo?;67`IHrD3yvI7ssWfeRWDDbnho!>nsS(1_ow1SEhj?Rtb+Y-u?Dfj=;vY3DzS)M z3eCKwP_Fugzqr;#K)#>~X!|dPd_RtLGr&yy%ls?8aZE7LlpslA>Oo-+!)*=>z^3>uQ>f=I|pJn3hrA|hIc{by$FpGCoA%ZZe&3tE`$)pjWDrN@*-_a(5L+0jAX>|9e32Qq4!CIoX59=lzDiTvQP)<#vVi z9X_1&eEDehBobz=45JrAknHoNg?Yb#M@&o=6 zATh?dSsD8)BU}k}s4Jj7GG<*2GQM5hp6?y%i4aCQND`RK*;o1O|MbtzXq{>BV>S@Tdpol5z<#DQFF|2~AuO^$D#m_Vk z_~3*n^C@=)`62UgXHC_m%y|%p3ws-4A7NA$k<$FoX3mi6c?_Bl$2QcxPLxaUE7x_J zhR{a5#>BnPR4SfuNV9V}Z5F2CE6r3Vr7D zU>$u#XW%+g`}$YsIH1c^yFHRX;c%6Yw5WeZ%TZ4F+b{K}oB?Su+$jL6C9nI6@4raS z<*1RtfZ@!mkjHT^vJrTB86w*V?ZYvK>uZ3rVb|rxVTJ@4>u^48NVKER;?hcOUf>lb zVP2UueS4wl1z3*YtPYd)xP-OhZVY#^P5iKG66elWQUpMDjoA^RO_VYy3UuN_^GQcU zmL6N=Tn~4!BCr+F#`DuCb4PEdBsVT*hZ;WqVz?zkN^>>!h`Yo0A4;HOrUhAl?+=Yd zDf+Ldd!wA$%9Brm#=5Zb4=0Az%fO7jVz~JG>eiFz)F0RDsJ#7cazjknZN_>6a}iHr;7VH(H4Hs6Cz4M@lPcVl ztkO{Kr}4#CcRyG5wK4Xi-4dQ(W41tnn#AH$!oxYYiQOu0iU-vS$-=Y=oncUqpZ!;& zdCZ&iraCJE0>g^40P6JRz$Ez}K74{?LfwaM1D3d5n|PKNUa|x-_P)y4LUjk$ zJD&i^3XW~KRnrz> zK_UP<%h)@$8Vr6S;~h8RCxT`gZYK>}#7|Yzx`+t=Cm=P@=@Ft(+Q^+I`U#wc)7g52 ziI4L997TFSnKd?MMu^mQc4bCUR(#BYazBz=j8!ITIS)q|P;up6IeH7GTZU25y(I|< 
zf=PX3d;t72ixf>hBj?Cm^R+vAvX8I$cz^TlAn@ zKmy+S3A3q)rf?PJpuR<@`v%}4Bz=8K%b;};OYDADa%jXk;ha1+kiEjq?6w~A7+tjqS>U3! z!qIXnAJGs3OGx9%Xw`BrJGD%8n*h-GGndjD=OShY9qy)Di|}_q(Gy|Ev4WnVYNNBL zf{u}c1js^D1@hIl?~Zeb!e1p!x^()&B2%;FxAE%5=Sd>rM);Zb+NlSkewl#*R7>^p z^L8;?Fq;c9YPo?hCF>ATWN$3bp9_l6DtVANZN6dBs31ob~YSNrAt*f&1`a-r%$;Q;I*tnLv)N|k*AX-Y&K zspKYl{Mx%{cRst%RPzYUl*1UGU%AZ3q-pF|a7Dm?*m->~%6*gKu`T=!mMBPe@hzol znI#=6A-9u8FN+e(Fr+Cqsyb^;{y?|olLC)_{oNXOK7FB(}S>a)E@YkfVG3Mmq?J}|+ry#oXTNgw&6dKL);8SU|1 zYf&}uFM^hY%K?Q72M)7J3hk$xHO7>?L2^NTKeGMRiXRYS$3>DCV}~GuZ%PXz27uE4 zs){I`d~S0=j=Ih`W@P|6!mR)OCAYGN1bMk;2c66M70Uw2Z8dkw+gZdQ%ILBfvj7g5 zchqdDK71^#sNR#Gb%N?crORRehI{Jx5=o7iG^4L8G#wzd`Ba^%9kS>$6`+tT>w@l{ z^LP{>!n`MDswKOd;o_N)g4V%3bDcA!;6&sWG|x}LoGhq_xxN(q=+nyZIdFN5)iJ&F ziU(HNEMBHc@$l3ZXc=Kp@d33c-z6V}M7O1}BK-)*bLl9yOJ5<8%@&Poq;vrvR_lKD zSXr$G|3bP3*hV?B)Xm6}LKuvS(A^Y@m>A>dWY`AMMCl-lkeA7JVxy{sN6MGGmY?BSS7LhD1WE+Z^(2AB}qhTI1w&n1J(MC zPRyBBsqVT4>&=D>$vHolsuU&ZDqDs(zs_+;swpF{jfsXhFAeRAt8`7vU<2<`Fs@Wi ztg~!X%K|7$7Yb*Fhx((ckrSusOih^WxqgNg&YZB%?u9d{vN<62&p0(O$s#f^BZh1D zNeprX2kTG8H8=dQ700j<>;MjR8(&Frqv^%tStBY>na}G(S1$pG_1x+?W8)*5H_M`T(qcNSHIZ+?G@ESW}W;y|TR1euG5*a^lXEQ>;r?NuL85!yNWX=l|8&q5X)HFHvn4!a%f=N%qqS^Dxm9p!+Rd)z?HlGp6bz zG92jjBLHujn6ocx{P3=_WL~5%x+ngpm^w$@z-JX%XH4Ua%DM(R?3hvxlM7J7c@j%S zSo;al{=1=H#n=EpYRRg*u;$M0)7d(F#dzGZgs$re`48j)-loi7sV9! z#kOL^;2)KDlL~x*yn4=8bzTxWn^v}fTE~~)snbe0b--Bqt;Onpsc_k|dB|a;o%FRu zT;Wep=?8o`rs{PpBP8hHv)A|<2qb3~bPTORJrlnjW1;Sv)sFTQGB;y=g4CuIhC9li z?%K<2&g-6|bcKE1?V7q~(js0vj1g6`*gE`g!g!;gV&82Rcti0+OZNQG^_}=fsZ`7K z^*0M9>x;Nz6_XFBrbjz%t4!t%Fyi;a2J#Q6$igy-QYN=hHQVtzJ)gp6(ITCxI~$WZ zXPbiAIiI2Im3$Ifa2lR!{ps}#VkAfpG-a>r_#-*tGxfZi{OvDJr^fHDncu8pY|P8@ zJ{B*sPI4KWlX^UX}%-tAPzHVOx&n zNS94a{8xK=8O*X2FHqHri}~=#XcJ5}@C{4Gbc;>(bioU*M9ebU7vW6!8pw;8zDZR> zMjESP(h;pYp-~&@s}=sbY~|%n*^J71vt%ZH+MB|fnj4486Ow`5>-}dVm@OB$41hVu zTo#U}BKMfY+`g_1gBFfiy`{l&%r4$fN5dab>SlG1thEvUrmD*o>%y$t>hbH=UU3)! 
zaAc>e8I_la-G_gcYXqAW3vbii6yTmaSHlg=N!otctI1StHvA+_fsZY!TjYDj9<;V3 zdHbCLp*-0f_fbQUV(g3qo?z!OnU;${K&Dn%VgEJq7$$rdZ<8Wdx&CC$BMYf4_0*v*c+CH9v1TKNyxO3$rX};0Z#fn*z-+#bJ?l)j0ebUPuagjUe~3_5S2>_Ri{mqeD{+Z|7tl)1(XS~Ch_}N9LSM4 z%T2(qVq2!ri~c_t)qn(WjPus{kSoIye*7TP$bW&7PUpR;jPWyMS*0MIvUzFf)&5)`NnIRnegl(1RPsMcy;(X|vp3)9ZGb8u?lbrSmF)c-XW+D=Lvw z{Eb$3EqdjJwK7PTzv&A{#TEaW!MgmlrqZ=G@g?OCl>E(m&c8%#mQfBRvgIP$D^$h< zky$b$mNKAlEp$c-`9g966lh~Hrz*|EeK8)E-+dv{7GWM4?#Fv!nH3g|#Rn)N<{URC z@KIVXWE9dXwvXyI3lRUWG0q0kfj5-9n3ryKjtNJ%?Pq#^TR)Vr`WazBYiZXiA5)0= z@tS>~w2e&0KiZN)Y4N-7NAG(n$r*%(HMtv+b3k$oz$d<7i!-aQq1y8+d!@;x0EnYBH`RWP;n7IwL6|7jHNW4q) zrJ3?H36qX2nw>d5=Bn4{x>_!|t0z{b03I%#3gzMQwvlgyoaxRY3YzPJmi^63k0=<^ z*fs0ycfn@9-FG~sJ=ZO^B1wp&i;Rq%qGecw%J+-8fw`1+-U}Ac{hJjQ>=WG$r1wIR zrwyyg&6HOZd3QX{=$+fDnyK#_Q(sFEUFRz2Sc`sM*e4f!_nCh#aiS4n0cq%j+Qb&{-{^vK^z8@!=BRm`j&R!nXi9g1wWQ)_Se{)rFX8zh=`NdJ)I~Y*3{q^vp z-bxsy3HhrWU?K;J&v`FHE34BBymL2P_t2Rs#hp7m$5>xYvBeZ(2*q0pDcuz^(7uvo zgQs5oW2V)80nb{v?`#8IW!9t==|psz;rBhhA=nwods6Slz?O3rH(F^owmN|0nW<60 zf7og+r}VBq{hf&qQbBUgDF7x9go-lj639^pH1Td~{r!)0AaS7(>9GWF+4{mJ$qRPB zS(C!8(9Dge@l%5wl_AP(Ku~32@lW2Ei18APJms=Q$_8j^kjjq>Z5yP`JzSeRheucB zit?rI4Ufp~V?{VnNIu?NN26$&DJ$X6^*M)B<8!2xhTh+4NIO6bbiVJT+oF~@1$R)t z)ZT?OMU(WS%gSOCgx)YO9#**Ymfs zW2r47%JKM3$;Qy#)MEz>LS9^E=GxXT^XRLg4BX>u!{?;9>b8oGvan-Zoe-8>z1pvy zEu_&|0E)NQZ(H-Sszf>v}!miQEFsd9xY| z)_qgCAJop!sMrcoV5%nn1a9DaNU1uk>x)EJq;u^~ceEyDZHRVC0#d-6xmWy*g&e0^ zGPzLB%vJJ40;Gz2t~gKLGYWV;MEh)p9AWan6hqou9%JS#tg!wjx}^MU+S4<;Rlzr? 
zIH(zegN8@XEXHP$KMx;NhSlAer=r)bs(zS>_V-Au+3y)2MpQrIPE<^2G@| z2W^J|XYHP#aQuTB?`J6?VTr7`$nqf$bPi#4FbL@(;aK+|>wq%qgg+&zEwX!H32Mavtuv5 zTf?PyNl3FU?ql{Kao=%jIP2GU23h;62gA9XXNWu6na=t%||}K^}(Pu!0ElL zN#&ek4tVr7^IVp0(9z)CWPILSWh-+39(j8tlweqsWyG6Qrkk+Ev}$;K z26!&JGtww0+5Vy+*qC(|-{7tE&$hzG%?9&N_uXtMV1Eg9cHU}BEb&3UOAPVgos=O+q1VpLMTkyc$;B<)87PVZ8YKH^(r*Q&}}xWZ@B*vlj@IE_oy@haQFZQ}wJx(r^RITX<%SaKKu#Fea0+?&fK7bGZfWp+X&vGw`O%SRT zMXvC%S|FkBv)L}>4TIa<;@wCrH&x^no=2%fFS=|nWX^3i_f@S0SPVGP!zel#HY~Sv z5{hff)T~a%h@{1Q&K*6Z|5f$f2V}|ZoTch3dZ>Its<1OS*6lzWPsZLtb7%|Y6>{3S z_rqx~7v%LKOFP}UfUqln&wOJv64%}VNi*njk2vN-;8kUMcUsD3sruhr)N6y3CpMoU zpLK=xYr}`cv2}%d-xRAS)}?EMx?OP5l#fc@eU9{*XynB+>&BG9=8esrdz4UqNZEAP zMFY@!_8hN#p}xxc)^L(VK4{E)qD=nmiFJ+B-=aI6%#-3xM$4+>-=JY_up={*T1}A< zDR$4CIt6208CP<(*73%BcOgi@vsv!OuwY+QOJ&Ntzh<@nQ^ma6bFd6PQA?Ek@o_?*0JfBfU!AZ-t(oAVS%vW<`drP9l`*KzMXo*8xo)P9R!vC&;PjK_m zxCcT-Hf9Tpj^`W~N;#o?O`j-fsL5pN%jaW`orJw)osKFZe zGU;y^6D5VTf?Sne=TX)xr+MMd^S=@=?71Q19`l)nr)`b7m8$rcK>XPAA4or-cJ;cH zC*-nvry>LI`Y!Z8-!vFT7S1}wBFUHo(teDmMLDgrn@f*+f|O*Ur-txZ?v5K zY>>Jt+$-JLuAKB@QAIWQvoRsYV`q0zPS65Kwpl#N`BL#~C-O9>52G=ju#Gk=fw`QV zi^V&3Lg?@7kM_nd)f|je-3G)1k*+!juE5S_veKop;!oRP?RYMFp*8o+3ep>TGrInv z*$4C7I1*m^qGG=6X@I2mC~rzy+^DQLtPi)M@0My_Hox zvj)!-weISd6f3pV<={}kXcR9iGe(Cl%16-mPfK$+=07QT@EDFS z;L$K|bbCK}(#-R!z?b??VuY`Ozb(a!7uiP!O67zXyOu-hf4mg%dxj{z9}3Pd#xQFL zjz=340QE^PrZlVM>k7sxNxK|IsMHj~$1XYoqhY0xwI%;>jQ0s7g~1jDInmYKM^Kk@&7IKWgvkFToxw)-bh5TCSdIPifhx+^!Z{ZqeYJ)=4E1)2Iz+ zOppvIV=!@Hh3a}+!QAa-XHu#>m#*lK{>!atrv3R9>Z`J|cRN2d>*<QyNl#ahN?I52Iykfji)t>S)A?=BJ{pfWi5go?==?G7{?`%3h>p%J=}r^eUyGB z7Gw>U7j~OsqicT!ZEwIP;1)S4P=?q{>cVz59g*rMw`6tO! 
z^ZQr-07PPX>RAom?P~+7s!$&8ZePR&in6RuKD*_D*wM zta3z}ZJnjIDh4)y={SaE34zI3#X8M5PAYfk#kXu|4g_mhjS!uS-7%vi=R0NA8dfJW zxn?JcKvQO@Lnz{~34or%-Wpup;t>@ZLzv&SfAp8(BrDw;VPktvL6pQaD*{5x(vViF zEpRSr*mak)>d(t$Dpb~J_dg{R=u_Ycx8SCw0>=%Fc4}KuXWZz<+N3t|#Rh+gi%DR8_mrVE}QO6T$Yj~-M z^oOB4RYwld7)LbY(g=p+3Yv|>is3o#I=M#u91y%LG}s(4_$=_deHoYVYYa14O#kO=rArd8k^3 zI4yA>W7Y7KP<2gsY~4dt9Sod?vG7{%Eq3rTLj{1p6w8@=wU)3}K86ad>uGK~IV^n& zqnt;%?~m1cL*laz<`=A{)5^;1GP=Rq#fde|#|G=LUJ^Rsii8(ikKjzz^n7!v&TSbc^_(P4|$q@y$8`^AS zrWP(R6pZ*N>6Fa>0R37ocRjR+uc7f&=wKkqgq%??+={0;mKSQ*vCz{D?DFiD5^S~p0ZJ|nO+=Ojfw+jtM zL&i=;uFpliDipd!7e22@7_odKVs-5i;WQu?N(%KNK5GKpvCe z!B;vlw2)i5cFE^;txl7m5vSx-5+{A=^U7QlMi-QJGsox&d5okrUu5#qM-jU>0y*FtsJC*$_5*KXvt!6O)0BWlZ4a+a-HGDl1p+W8K zg*O6X=v1d^y8J?O%$>tTixrKXt)W5DiI%87sDq!P2+UnYT8#O?+k0tvCnu^`_m&#Q zcR4xuhF0da^`|u^6nryD(>`lvpO9Oub?O=@#o>~9|qLq%c9QohroHX`NQZVHA7+@_1L zxf+zl3WK)K%q$g}DmRMtJtG6Y8l&@FTj`l-#XBKglS$c!c1mbvD5T*oe(w|;4|HwV zp`!aw**}sNBbq;^fbp=bg2=4aW6`VO{U)!1z5NSzh189TUSUgG*T0f3agu!i35^!J z4q**Pj|~?^>=)bUVs@)Hru0sUsN0tw7Fy&x74Z$aD|jk=v~WVOXf0vL?L(_!VVQ8) zi|uTXEzNcTDD_|t=udmyS*?kCjEgR!nR}_smpt75iIgSXSu*4)&}J5ovwtl)1mn}q+87kf%(oO>k{{ThX^IH!kmV<=YtLCFj?DQdEe`D}dqUu|ydibZ8lzG)0 zs%)m#ycc$GE|Ej9;@H08gIZ-&BeLB4eCGf5qEDCTQmGoEe{Qm%?oi?(JXAap` zCq(aB#XwiNP)Fpgm!az}?TZQYanXiU-LR|x3(wWs%KIKr4NTVZEj47@Rcx-LjozhD%ifu7F zU9c)_+6DRjqxmn*WVMeZP(B(j=~nRml5~c*GJqfQj)ELza9XQ4D_Suu^YnK)Yb$ye zBsgU8m{)sT-H*T*{Y7WLfy>K4b8`hf&B_7vU(;)hL^}mGrnmTR(SZ)SsZ5r+?6-8G zLaRuRdQ{l$lql4Ba~n{|8C%o8r2a|L*uH~lFpmY`eqtGa5 z9*un^JO2P5qtX8W^Skp@IiJCFuOI&aNxZ)@naNe;@BaW_YW`XdgUKFWVOzm}|HJ?* z5CH%J0RsXC0s{d70RaF20096IAu&NwVQ~ykR2mt{A0Y4#B zrLM5@Z%ZwFpkf*n#djtxbzKQ?ccQ0bdQ`YO(Q_8MCXa!j*30-pp$$Pv;$dqlZ+&7_ z=?c4;DixY^hrffN+bNr3iFFp5@hgO8oA8$mP|a_rq_aZ#s)Ja$bi;VG)31RPmat*% z;G?iBy=qtiRshwPLWU94@CSR)En^C8R>A)O!TmM)|lGj zD*`aRRVdJdrZ9l7oDvQKWm|pFQ%S2v#*=2{Bu%y58#=Wp{?R)YCM$;36>&s?YjFRULc90B)WM&MOEBZ{AEf%M4Ll(OqoYMS6v4f8JBGzW5~j~WHVIb$ZJzl)j|G3AM|4_v;PEVn;Pz=qZ^|r@ef@2IzVRqqNUHd!^!- 
z8bHq{{{Vpi000000002`ef$Gc>}4i_v7}XMn4ws9mxuvbcZ#iB;B{MdSMZu=c>AYY zcbhKrs+J4u1p-Y6r>yG#09RKvG{2b9-8%mO*q)PIr79j%=_$lg#kNc@nt}kdLLN|X zG|+l_%N+i<+tO&kLN(e3hPsb$N!Q$=^_XHgAN4!o&fbwiCab0mmW*}X%p{=ERFE*{ zi_%f!^uC_6y%^f<0|U)1r>rEAm{a75TXrRB$7rCJsdn;$0E+7J>BLl0u2tXv0D%Ai z00000008?P<9Zj9H*-t#pYC6$uKmyS-?{#~_dnTw=lhS`%U*rIbN#38f4%+0{{VaU zKiz)k`!C$#^}o0s_#^r}N@M-RGLO7`Pp*7V_CKhf@BX7GIu>4MqJ5+0eG}$=)8>De z{m<|}xR34r;w;4teI`B|G{QGS+>06;ahRQ}mRpuvmRrj$=3B~NDQ;PQQ>??0I?2>x z3-U$f1eTzpFVE!;KXiKi(#Q9o>NDzDusv_o zU+n(?Q~f{cP((l4;r`VR_6UEqL-t=2?0-}IJnJC+*YiHd^B)gfztHf2{{S@o%SHC6 zm`~Y%6Yf6~=>0RV_4JPLeYlSU(eR-Zh174keqSB`0EI`z@X_!d2Z5~B-5(3wc-Mxn zauJ$SaquB8GS6y$SP&+ct?qbgo#jiT!Sn?CLyTr)g8u-Yk^WEh4SX3UpA+-^AK{-z z_=HnL9t$;%rOWVT#xa$0Rl<{R=<8VJ~&?Aub*##hXp(rVxjL z2dx>Q4j?>Psoe2b8otr-@(@*ua^F}=ivHze0|!_r@F6a}XM->3ybt44;jc;6;PgB0 z{{Z0({{VP@>kNEoTc_%nMZJjW5=Nso!z9+y%<6X8NEEQVk#?*M&(?3wIJ}*zp{&oT zZ86EB#$~?Z!4HD4_(ZnWPGy}csZh43M7TZ4q-ZA-=s1i&%!(p0(+SFfOu49FS!~`ZCzaT=@DD}RXBmrMcZiUYpEAdPG?I{qHejn(5Dt_%DWoyN9&p zjAJ4U>JMNt*IC#*s3?p<{!`Q<+sx0hw`F)`Y-A zq$3lPoa-<~B%}qowSjsCt;G5+JTUVWYxQ@Qzq{{SXhfum33Xv*QO)o05Ov=|L!x%HP$@qIvS~&(B%E-FTN({v7v& zg6jiG-}k4BC>FShQqNMS-R&4xqME>fWUdY-7#+&4^A_2>lt#-$wDrl%ZG=n|Z*l;! 
z@Y8kCmnz81Z6&}`p~wcYru)Vytt$qT5ZY?d0E;SKy=I}R(Ue(@LJj<0<@K5gE6J81No7{7*x~+$&QmKQgUxyUa8EMKu*wjYX*4!CTkQ zn_S{h)$}3lzGA6%8t)54KbOY`dEvfym$?4`F6C-FPlu(UAL97Jy8i$dhq!+k$e*8| zk@=5N2dH14LRcI7MRa~2NS?gbP@L?w50Ap@2)P&gsXz+7EwfG z&9;vsEpT~~heM=lw_cFy(fElN05424={Stgt8ut%eLYnHU_CbjS?t~rs7J-q_`(s6 zJP8Owel)DjY3yZ0@hCTU@MIb>@b4->yYTo4iCzg;YQ)-Kkfh60Q?_Ydki`fX1Pxtn z2AdhEJdqc0J}Umg*y-Tt%J%Wo$Id1`3+u7|C-_?Ke?N!1{!-@vUm6;xuD;$hL3#t1 zX@R!pd%aiQ8h+5j>e>llD$4RSjHtpBPoKI$h8%`H#B|`rfQGzch?yBjNc{PD4ruCdZb9<_FTZ3ZhzhN$g5Zd@*yd73&VnptmDKAj?%6?C^PC~`6DMP& z35A(noh9@|XCddjpdsCuOgSB#)JkJaGW-t3um!u*P9HNs+?>7|W+%2_Q23h|^oMm?h+@L*MS+B-Qcr?R@sr(whkFlKDqOjL;K7o(%v1}JA_*}5HcRgE3FB7ffp(b z2?YUs-w@D3R@K9KsM)I+48cQ4twP;?W9+k2S9mwV({$!6RCYI)o0phu+DZBpAWD@S+|}YdN%#nOdkDu$YeO8Z1Yoq-;$(C<61z`0 zb?X>wwPq>0<;K$CRmENpNSV}8rPa4ddl^gh^oxkKr9WKFjNXmK*@8K^GnQoVbBU!; z3|oNWtX{<2fdV4kRjx4j8{B@5vi9x0MTT3^xZA<;HSIf_KDmT3muR{WFTe9&3*G)` z0*bYH^?vW9$vQ7$E%u*AQ-%7VR9K*G85)=`m}8CQ!{oH^qYrW@!l$j>`u^qG^K%Z}ZO_LxbFt?L~zgR(fOf|fxXT)33c zZ_Lmb2-Z3{6uQS3YK$OO-3g4%aYpkO3A)k-tiwyt?FQ^o!v+0BXlAvOp9L}5BBq;L zcZy?)w%9rF}D3b0`KtxwDACxV1#rFhrU9EjZUnPG~tBO?8M$h9LDEpL^MvYNowWc52u(?E>mB zmreAAx)P#6QDSV$6xIv#2g6y6-5kHUk=)vLGyYtLIq9+Br8l0YI^#{hli~{cbw(C# z6RqB_S$;?9m`HR%jt&acCb!xNT^RjM9Ed+QjS36SkScmA;l?g1T`Vg6{{T{vxf^}O z0M2V}R|no|SK03dnCBasVQ1{u$&B~Jhh@J#Gv&Gvazw9Jvl&g`rL4i)aQ^_QSb>LH zi|8(hv8V|u9M9G@`(QoCTt3sHFmK!afbxy3(cUIbG{zZn-c8o=F$9WVP3Dg(Rm?2L z8-c|IA~&J!FmvwPQ|THPOM{x2Sp%xQru1BsDJ%_gjb@oLMxbKoQC z^-L(;0XdSm zD~PnP>=^Cq(s>|0B^65P8!4I!+etzY$a)ktvgrm7v=-E6?p@n9%=MVy zd3HnvsmUmv#?p}nVehh>Z7q^Lk$uF&id*U(_GVorAgGOrx9DH_7LA*une;jnSGV_$ z)wHZifht#nHOIrM5cj4M@oB_OKYAk#Cy^z+hi}Z-R~J3UheAcijRgp2B@pMf>IgEM@`krtrq;nzOp;288 zVtXNzTp8A(fJKlW=9j#|Pb2^|Fgk&I zBn~)Q+&x@&U_LM#SNpKzVGEG=dU?z|Fx6!p+nS$8{(MRHoSF-DbrVnf(4w*IP`AooCfl8wq5NWL#n9L5Vu-rcYQ z)79m9Fa>{fz9wlNUc!GnMUjZxz;KT0~y*e!yJxbq-x|V%yBhY=9_gUlCZ(K5)!Rk zOoRC62FL8{(F3&!ZzGeZjmx?LH}HLjul&3_zsn19E-s!HW6ZDDQNW|DqbB|tYAbjU)X>~h~|Sy2W>uN&zbY{bxUUecu0ZLa#vsGu?5+lfNpps@7wiJw-h)1#s92W-oX 
z-(yHVG~CU6z+tk?AA~B^RINH`#kZSsL=`5J0D^9b(mu|>G ziBI41NB;mH>h*}43w;6C>wr<9e|Yk2$2wzI2oH8R+sa{lwv=r{TU|@I48q;ym9Tp9 zb!v*4;7z{(SzY9?pmns?I(jfx!e;Y2v&FChb+%)D;bzvM4^p8$ZKyl>N;N$)&Bv(& zv-}n4W*$k1R_HkxtpqoPuFicWM%3FL(<8})f}fm?;=WL^2A6{lAiGF?h3kf?*HR8A zv$w1&CR0|IulpLUZMWZ?7LO_S&%D00?X`U26?b+$JDyR{u%94|rh^@OOJM*w5?ktU z-R2AFY0pxlC^cinK^+)Ds~4~?mmEVPf{G1SO*8E_n^V}7Wir=M>jnh83o60y&_L*p zYT`^=$c^572f$1GS`QEMji;aR4F>Q5vD0~1EHCegS2J1YIxxG&_i6AkLHB5xTy4&q zSVbIVnJ439tpG=~m%| zU(;+3YfG@qSPv1Q=}7juTT_yKr9k*T5VAQq$~Iuj*^*deUNhIKVyfbnZJNZy>h?hA z(or!|;==i0Pg_O3rWH!FI&WN5P%}l%nXP@Jbr0-V*Ha6aG@O3rb9~FwNJM*X8A4{o zUsR4hHC&g{IYZzpgkd+jzowBM=rOsKLqy?-d{wCRg@785tMfE~wUSi-nT;zH| zScjy^(r2P6spQ2ho*AzO-a4jIad?$FpO4|%S2KFqtYCZHhw};@uKxg7Koiwf)nBMr z7IEp?)eVs7K*u*-EAw3-G+v&U?WAE8vJHR8FhezM{l}^bx^5kX9oWOp3;|Rs+u>8NOgEuFK8qe8s5=Z(UlyAE9YbUahB=&eJ7XCTt&Cm zXq*L`%RV>!{yFt}z~~)y9_jQPz>Eg!QN8HP@|NbT&7N?Jo46Ly^LyrLgzRUa>5U~H zc)jtBW+p2-fI(d&)f?l=9&NzYzRuVsIp|+wsyXwPOiJB$YHGf)m~$#;;XHXjO&ZyZ zj7{$q@h}r?Cu>RTDcbckd*I6q(Ro8u^ywZ@Fj5t6XM5TpZANUEpxEc+j3)aJ!K9>L zNOKn`dII0vvu-EkGZnQox(ro(V)9^O^)*&LQ$s_hAfPF_sKZj}kHpjrs?!QcZ0weG z6HxL~WqZpK($sy!)%^c<_WEVr>5J%hZ@S|Kz*f-Me_pO3lO}Sx3$XGN?AN} zP5VmVHradw+6|ScYFFYZJ&}V&XF5v@GHG_=ECuk*o=-?a3fYLjqf%C#9irrTmV%o; znn0iegFv1_`)9yT8&7xoi{SV$@MAtX_xkt2O7gV~|uR!9Z z!M3T2Y@u4S^A!cxT2oP|-F~LD4hU~v2){njl0~VSG!F0PQgL3rlJdusHzxCJR8<&& zyP$Jla|&jz?qAG>%O*UZvjR$^k|N!_(Oxl40^f;~@cTB_R#MqT<d7~RtW_NcS!3+eimyr|so3Ub334vlI1@P|!( zT_V9k*M9D?2&dBM&`M%&Ko9j7Ec)8@H4!XYRr>u$>IU(_z7}tGjHu&KwbllMqX*PY8IVvY9i~n zl%e6798z$D==wv@@TaEy2kvO_rQ1G(`~DZ_6Bg8~2o0}3fCW&E$1XVuwdF_5Ga zRZM%z^Fl!svDRC2=BMKsXI&> z40rwHp>;6fkD?B-Jp5UnuxdLQA@F$PaByToEw=_w1H)b#e}!HD0B)00rmLE|Md?ES z07rSoxYryKjWpV2ML`a+4ZpNvX)52C_cKmks#?ftbq}8MgKg~%lH**Di1lXeJ=#5V zzV!=K(3(6l{O%oB>%&q8RO<3C7t8<~3s!pPrZEu*BRtxLH<&NK#3i@{XumL`6k5x& zW4m}h$!dtZDK%Bq1rIm-#67%wApDQa(c$L@ulV2b_&jq!m!IU0UW&7h4m@`Xcb+`* zdCSTP?++#yw|NnVVj9b@rF+H$>Jxh3H^gH}TihKyqT)b1qguDc?{c5yj&(iYyGUvm 
zY*nRcJ!*<2vv=I-P_=>4Zm9I3ruXg=2v%smu^}6rhGYTbc00}iS_noc=8IbmmBr}H zNX**IMiTERlmNCO9;1(GYG1VchtPoRU#3OEj)MHmK9f>;6@9-`Pp`f=q+4zEgtvi3 z({8wp*I8mRoI{*54HY}fMXFLWD^F9={{TGWdHEXlmIq61>mE2xqt<>9sP}pN5fupV z%SOhf-2>KF!+v57LC>7bl5jV%?JMgBBW{30THav&Gqt&Ob{9k5Ta4UP{{VRXM5=8W z{LAaM!m05agf=w#%etXYOa-LkAJdO~7s ztgyBs5v{44PvFI!vnGN8VQ3;$6)s$el*AVD9l#|Y4xSf~x?+q0zn1NUQ+17JM zg#+@INEYb(3|iaT8uN{^Zz{c2^941|OvEFk*0zGVU)wPBu-D2a=>CtrqI)Q8ERG+N zvLkiB4jE#M8Em;F^q)FRRZiz#y@h#5If)M9;m$Ce&uZ^Wets6Hy)XR@XhUiFrD*C6)H09xWFS# z@2{i<)NY8yYb4dI0xE2I>P)nnJB%HAv|ntn;dzSCxUqvKYlxcHa;U9~-Nz7kkjM-2 zE6ywCVWqql%rxJJHP-XzE)tJ)U{DxOXOdyvw9{vSq%US5w0-mWnh^U_HUqHB?sI9> z(fl9dai0|S8FBZ&llZu5o_`*CL|!KHenXC@SwZ*~TI=ST?zXq(-f0)cUlv@+M*|xC zC565lr9fATCtQxcP!}({&(ytl;d_{xYwn-nsV34kNJ~Nd`;cu{V@!0 zbo@h+-`<*lT@&*-nLswYJ>h3ia7BIlvKB{0r>HR*u4uhfIcnQatgSAk)9pVlU%pUm z+(X*t7qC{kUsS(ihNs`+D#Gf@55I7QN5nqoNmIo|?X8`(PC<>mQko zQTi(70S|@q3GGn7F>X;fVlMke@k`h%K&!7!Me@V>EGq1HnHz2%pm^*#8Q z{8KmM`{(a0eWxGSys8BX)ABJYXcvu9m6xg13XjbFP3*X6%-9`ZjGv+1>xM8irjPV{ z$~bYZu&?At^%sqgOW#jeB~@+6dNx5V6MPSACC5Q+!20~At={fl@%Mw%Gl4pczqm9z z^ZonIDCWES=MjC3l&oW5Jcv_Md1`#w{KI3=6V(2pygVOsNlk5)EbPS=>qB9Fa;H$R zUA2gmsC5~L515kF4ukgrU#`qYYy1np_ZK-vje0!CUoPIi6EDXvXY~ua#9o(ov}qt% z)r{9RqO|^|amT*N+v>h0932}&%4RDr8gHcF5{ltkiP5{(lejh?cwsebF&a%hU)#=F zkh(SD(AQkCC0pE=e3oiVdZ@|o>jCN$^$vJfM#00(K&?C<;fTS4I?R1T_*l!A{60DC zk&nOpo+RJ+-|*%H>VuE2@P6bz92eAMsN!ON;#Ry1-O^IM#AYlz*G-@%=OUSJ*+ap0 zXF>g9{mi1#Sx=(kcvH{n!+!%GnE7_JJ+5BCR--riiGJg9VtcpzL6)!DnQL9-jS7I^ zxkQB4t9zifD>!9s>jpISOsdtVNb7Qii(fLI%=4TdI%AG^+4OS+uF#?D`-8=F)6@2v zk#Ey@^pvcwCCYR3t-n(D8sJmvO_uUNywBX+^7H8YN`lEkY)D$g?#tpk(>vqQFtlR@CX zJm0JR`Ad=+ZZH*(w;9gl8IO?W5XcMC($mS=>jJKKFg8HN zgT%{^bu$6ghg&9v9pLsHG&JigrgL*jl_+Se0jTc~zPhey_luQcEf#ON*K%CD@q9G+ zH<#XTiskq}$KaTD(&GK?&l~W$f6I7CXq7yTmha1HK(ebu1C~6`b`)nd8Q1OPhjTlJtm?)XWfGaOUx*&6^4aN_ zI+r*r*AWEjyYjz@NZT|DzaJ0@Q`P;!6ekV+CY0gj^`7I%2E<#YUwvhqCxPCu&#@z| zKs2oC9*n(XzF+IwIYq=jGv+rI?EJ(7)iv!II+;H&>K^9g@_;CtG!roQT>Rhec7oQ= 
zT_({7t&BLM!vGeYTn}?bH@`p9DKrPb1$9zRp;RLn&GGE_jU=$-U7>G;8jSx_Vi%4vYzR(x|v>_RofH$6-&%tGD<=cR`K0HmDWp$+e9Teajucx_;|53SC(t3J0r z=RefX_H+Gw{{VA8*3Prn9#^hkyVo*m^~Zeoa@{G}6Z6n~PrQqWr<8n{m9idUHoH3i z01}VgPrl7dyctbJUuM$XYuLXL8nEPkwLAKC{jOXch3`^WXS)LZ?=hX&62TokW52+IZ9=0~Y(!xbj?+NxEuM|7$}^8#$UPYE zng0MnX?dNZRXIkV;Lg2_b>7&7?UVKE{MKB*27Et1{_#yF$hcc2IwP$=6L|evP1=35 znl79l316tReMxIK*?KU8vupJsj-%WC5`oRF?QUATVS2{ho09r#E5x`r3{KiT$k~2D zdAh&>dEZA_f!!c4t&y%g<-IIrxJOYG>G&`hJIMkY+ratlDNv@X?mENTGj9AG`^AId zw*?z=kLozQdKX`Wm{RL!BU>He7bA6L7r*rZ7I?*;N=^uDp7E}v`&O6p*H7SJ4#}ub8OJnLEYu$EtTEb_wnIhBDO}X zB8P~z*xSwk!YIwt*LDt58f}F8?*(9d65N7+!*a8+gOv^(JOY^E4($upi3*0E<2!_`cr}qes)9uP7^hP+#Iv@Gmsv zp1Mc%6=vXB_o!EOBOeHoZZ_Kj%8{W{#t3^fpOBy~?)R$c+Dv+`wkfoESG=jaMK(g&|dq=>w`h$qH+ikr` z=F!GGGMj_9mT1AGKrEWIHfu=OU3ba)iCIdGr!^}{22{!-iIgQ>RuchRE2Ll&CXaF@ zSO*PUyNi~S?#da3VdXVDM%M$auZWqj9Ijy`^5M;T`@uo$E5eG?>zRoK(AJ}i!cRED z)3>7$3uQt8MR(Ikz4UnrTP+>r+4uR=r_>Bb)Yv z!{)2w_`k;EJ|Ca?F20<@suL0&7dchRLbVD%(hOy8@8(zz#g9`n%1dX`lQQ}A?Hg1) zZyjLh;%oNyA0)6)-~i%=<)KR>4l>P-%ESPdH|~nt~g);=GqCqTNr#N<_DY zu+p`y%Wx~f^je9Pl0|czc7y{%X+U-rN+3IW7tB^1)BBH^cIOdCEH6sdVj`&M*u1~eSg3zg!(~cz+_v@o zohA`T*?H=JQw23{y=L@lh*%NL=HY-AE|+SS#kCLR83Ys@HbHKisD5LDr<@BC>Axro zU;|o{ql5ndHhdpH@@)pR9rcdWIjbh=_fM1xl{FCcOz8!pV&{<>qsU0aXRv#jSR_c7j*Ah#$5qsOkM32Go=JqbYph!h3{qh((3 zr!kmi2WuI&Sm?JFh&r39KqZ@%6u}s~1R+G^Zw$|x+`%kjt2E!d4K8y0*<63=@cjP( z$-pk^&oqB>)Rf@P<`_)_!TE?fg7k1f3JV5e6-xty(i;RJiMsA^Vsxpm;#GlWM%y){ zyWA{);Sf=_Mf;wP7O#7Y73)+}+Bc~Cti16GUo>mbiQ#TV`V&|o^g~lCB>JreAoS9( ze`qIKt4&TwYfPS6PW4#5%eS1vRxp-A$4%f255tqzSP&g7mvg^3V!>4(EZT!7!!n7t zkBpiUN~qhbdJ@4rhj;gKj+eS$k}TX1R;1>NHD-}^(7UlNifLR$z!=%_G6ner9(NXJ z9#AYeXb}Uk%mPM8DsNIF-!F;M_w7H6;Qs&z!{GV<0FoIOAGAY3lqnkN6N#%0Ivrpd z#{?Zi24U4MW&;~(FWdn_#4+J|GN~XPN#(bca_dK-5*V?!RtZ#3RGZB3x|WWO3`bHK|wg=W_n?{fl1Bm#xnrfrjRjXXRaesj1W|qUYy0CeMoFGtl1Ec zNe5V=dydao>@sQairs;B)(vMBo~>{1`G%2%o{j1ybeNA{@$kfDct6kL`Tqct(!q^9 z<~g0@Gwa3q6JgFFxRuoD6!0*Z>Kbv#;#pvv%l7lps5Ft#EgM6-u_JC4rl``_q~GC+ 
zAbCTzHEmp({K9q(iu{)-eqtpWr`LJPOap9X8RdiIAqN9Zk2{rvg%mF2%3h5Jp@0K+ zwS&&1*v-b18nwJGXxm!INQJWbA=(BZn5m%dPN$0h0FTA<{{ScI^Og;p4zji$rlEyK zk=m67JLhefMnMxyw<&*LikuRn|2wIT!ei9-!0Ger&aS$71z# z2k|yT+G?+MO-daUEzkh^k#k{skJja`@0N{rOr!Pe@M^=sKawC;4F3RVQ0)qx{cT}c z1Q++xS0|GFp;3BTQzt)f^#j%k<{=CVS``7_31qg#(shY~*2#sK>vxMyL*2}|Fk1M1 z<9;iFbd(~a0Q&WsWCR)L%{}3@5q4;IoG4bXn|{2boaG5#C=Q8|j-S9CLGDHV0;94g zW>;g5+2%y~c48W@%t@gMHsR6`Ajs+VXA`T(lDny8Dtp{OWI=)w;Q;)740n!76ZK5h;&M` zm;Im>-+qaOTgagj5xa$c;P7_=>c@9FR4g~aCEK&j7c6^cK2ioUHcp6G`ld~RWXR2r zE&NMMu8M?!yi85(6C#`JUSKUd{HCFt#@rDDO(1IuU8=xuFIrC9=uI`Ky00B;{E1{L zt4C+YD(w|LraYp7`kcycre0y> z<^#_Zxl2_mRpp}>2*UL>hS5Q3HbxHgGJbD8%}AS&FCCxKE7#Jp{KRjv{ZjUWYqb2t zsyjim)*aLuE4|A)m@8?+1T$3aa((7zuoGE83M(ZzEr4woGd=qy<#+Xov#JJ~7q>}N z0NVh(KZ$X6IEVNXMM8s!Z;97H^DUL8S*K}<7>Py7fO$sMj*vQ))({PjYgqWNh0E|| z&x`&}v<9yC`jmSQ0%}tNZ7ZZJwR&p?qCN|KU}XXE9fnq7JZi0(oB=xyum&ouytOn7 zdK?oov>K&*l9C1mH5jB7Hvz3IM?ls-Srbn1B}5{y>qzLt63_zbmkKc^fdjMcv2Y^i+_=ns@l2))k||;>8#CU;Kb0kLk4WxQb|UV zh|AU`AtTGxX$*3hnn{G8gB`U}zUw~rSqFyAh zn9o_DHQpPg(k*|(FU7LHKM0rs$)fnh+gj%#PI^`O-Yt=)P{#&!r2x;N9Y^DQxJt@zHGiZPxYSOFxJi!$Lh7kaGbyb{c}?DMbEj2UoSh&)DQnNE{6ze)Bck#IE%|&c z>(&N>A_wPfGXNCgXL}iqglPDHy%vKq zFNLa@z~$4&if1-t5>mT{fA zuFiG;0F(9k!I#!lmP|yiy(2VgxP+G3p>OpBtEvrVU`c|#=EYjev@^gdTwJ4jjBiei z29!LeJ2kQ*A}E%F2|4wKSAyE4q(TO*^g>XVNaRNe9t&ptgZqrohkfTskhy=PI;Xp` zUlyZzWBe+#3wjtVG2$EpI-7QFf=j*)ptb5OMzWdLe)oU@+Q(e32S5D3`?gBeFU ze^^F6>_oEoD^cl#VMa`@z@rfTXM8Ld`9!_^#l^k(cE3iNrR6ROPI4h4xY8r%-%mq~ zHr5$AG3 zKt}Cv7m82h2ngQoi3zv>^!M` zuXJTAqX=>sFK{;LHtqBuI{C||_Gq_v=h#oPG&k>{HR>Kq?NEJL!|-$1d3X^ng6@sp zVZpd;y2!A8iEaAccX+r~M(+_H${-AB%(W$E>7%1K_T5+Ib^cqw@>i!((z1kPcb%ut zg!N{eJ|92mJE(ke?C~TXtia((Z}Q~Gq zl9l2|?m_`e4tvH6GkdX=hNvXImoeE2tF~^>c41HNvlGSrNQ%fm>cyB~KQyP+?!nS3 z?ZbZK6O2RiRT1koy-Q;xxjBqH{}D!Sx~;Rp5twls8`@E>pW6{cFVI!Ik`uD*?aCoi zCNXZ8deEVm^>%byTsO7ZDNs|Q&l!rZzbmK|8Ly6dXqTgS`&PRm!}dio-J;5JrQz#o2x9Sd1(C7t!g6CXWknyg`$yC8Y7ee*QB z48|tvQrZKxX%o$C4p~&e%b=q+K8-zVfksvuYzjLV=(~yygH?mbBR=Msh!8N(rk}i+ 
zv3l(*!?dSghsTU%yNhUYGj;tC)}_*_$AF%7<+uLm1E&jyfnjrqh0rH-Th!t!6sjhd z@r{@7==1XzZ(7f&J%2GX(q8l}kZxc817yQy&SlkUJ{9vc_zbm1JTP>G={$AEjE2fAjp_NDCDc3TkmWyi@{%1?soN} z?geyuQR)Emp!D7FknmFndmMf`-M%*iPc-6wvFh;To=RGsD{Jr3p_9yL7ro#Dmmn=I zvsuHcvtOZNta31qGgLb;wCmS zr+lB&*)(~yvOQMPB;-xaTJz(UL>(g7I8Qu%v?evC+~7n=Sh*?{mVQ~7>-VYS?Ckat zLuE;qYS8e4bn4Wrm&6TXYj`$)j-YuKDnpZ%;Gk3h3 z3W267!BYW5G71hd42FR`5uolM8p+Snf?-`Z(QuN#*Mj3h0SF=#fCekU>AK;`ARiPo z6u>fp0qyK)0b9YrNCX;a2LJ#O3kEO8{yo?A7qL`qIEgaUQ3ga(1|XQxQTnTCG7Jjh z2@<1QwuFM>p%B1-IHRdseMj|jOg!Fu00kuv1qTI*l^TXX2_bL*240H*!AzCVdumSf z(yuP%H;JPXs3rmgAB!^@lr0qnCqn&|A59rRL#mFqbQ|N4Uk5V-n@|!MK!j|b4*`V) z--GG{sSp4_B1n)#<3DV=pDpLo#z*{KIR)U@T;W7Ye~k-r{%cAks0bWdmP2!;S9 z1dRxQ9-;&UE0J_4y$y1e`O<42CWPl7fJ-miG+A6%$HiVMG!O} z2^CoE6osO|fk1iwgBt|&LpW$zA|-gQ^)sB|34A26vXGnc%<%&hXjUK?L<}221VF({ zQn3*rszuKYLXRbr&P1+6pDVp>H5vQsGoW@LAj!&b00}2Tf>8iR@220aFCBFq)A{T( zep>#k)1qgj3-r}s2moRWiWmAfM&QI;5ubb^!W{y%%VvVXV9Bqy+``5QBwk#}AP_JF z=+kImkig)!vV(1cFEgL1by?f@M-J*{Rgryu<0iJF-Zu3;|M5f8#k60F4JH`4<@?Id9bpoBI~-`Fkcc*%$#@9Yh24x&KLqgo~_x^59uS zt?>k-5NHt4*TD#cC;yup=#;4_5+`=o@mMA@kN^Tl19epXpM$1?ZUkdWMUwX}?9+Uu z1VKZ?{_A#sk@@coL8Tz9X9FB03Y@Ip&C&i^0S*Q&=wAOK10huggZ~$4Z1TMlO)v~p z_CJ{X|H#l#2mlFWA~|oJOyg0YBLO6TV;Ge1Ukt&iAy^qi00>qNq=3OwkpK|{j6?YU zz$6tE7(fJrWH4egg0?`Qq2OR3{YyM0P%zNF!D)iA7=rmXAhE$I6+wjkZ!+*_Lm3K!4?=;4 z1Rlh||Ej43{7-Jmpk83u0{}RMVFSU6rJ*PQ4Fv_m`(J}9!$I)>x+@ZqjEx{cvWJ3z zgP1`62N^gW0LdV9FhnvaJSY+b0F*#VP-et`XBhnVtN+Z}5I8tNfF1>o7z+MM{hJK* z2LOTq{&ItdVk444H6hSoP=LPi4`XRl(;bw=Xw`D`Kvv z-X#yq36y?!pgSp*O9D@2L>k*s(Ku+#U}#bE}@&|Us%d%NA9=#hBts>Ov9$=%$GpZt?CG)7yy!@C!Q8ddz(YNHKV zww5_t;R>snGtXSK{MbQqk@&Cp=5gqWkLg4`5iMvxON2u1(^anguN%miR=qJ|=5l58U|RZDp(^Rngt)3Rig#e`FJ}~r2n_l_IEM)Ym5F4!GFgcp|nYJF-m;U22-Z094 zfg2eX&0Kzuv!ZrJ7`=@eAaq-$?j1bzn-U(D4yCiiXQP*fO|wsAfUD~%6)~O>JHk)g z0$JBjr7Ul<@&rACzMy(ZTq1SD-0+3aWp6zr&5e>VuZnLt19J@7y;rWzg9Y^l)NW81 zg+1+joc6#y-$J?_(grLzQC;=&jSs1-XeX&dIrFAbrSB*n!zlzu6x=GtvS-f7Ie!S>WN*PIXO2G1YiE>|f_Bl{n#Hf*=I 
zLaY8v{-Mlx;%^Cj-uf-3UfBE&7*itW8ZmFR;GJSELUqvDkD%qh4h&L(L`vM>%5uf` z>O2+O#I8|jc{;@{G|xAvW+X0>C*p0of7|ac+1k5x#b(hPk4|zCrz!LK`N_vV%2MIn z-`Upr^L|0cyH&3TnEd;}q9Auu!PgH_91(Vlv!Nml)5K*J~qhtP~Gt zpW*y6bJRHLUAW5AS@)8HCrqsrE==&vS6N-q1}MPP zqqi~~OHwWhnuS+2#*by*v0}g1&;3m!<#n0Pg&tmG`HXhqr~A?elG^tHm2I?ZG_SV4 z((=FSx!rsCZnb+}-!KTPN#s|&v3J{RR^O7yj!w;2?v%biv&jqbIxV3+gN!B5HlwkW zeI#&ZJ(AF7$m3GoxpMDs$vS)OiSj(@fwZusHT_zCPM z+hkV&o!H*`@p|znQf%t6h5vO@MIiO4{PA5QarRN=SF3*J%%2-=d~&4xWoj6DjO+2<7ai&iy($b9M~V-BzRIQZYx3*5gLEoE%Fq2Z z?4B7|O}c$yAEv8P3kw6&jO4_>w_6;m%u9axl*Xy(M`m4b!QH^IpC9e$jA|)PY zE!kcYE^yT4w)(#bw>_ZEqUQ{mgpoVSop=rNwKp719M#Z`IJ#|qw%TzKJjrHdV26~- zvfJhuwM33LJZD{=Iyezw@JU;bm=Bte*Ohx^1ynaCTKAoBVUFtKo9$T{$B)3~C|+bX zY|dP}<}A<7vmdX#nkN(xd;T@?T3pqw{lmUfzp4oSVZo7=jk&W_FTaCUfuBu*$<;vC zJK6ZBI`>tVnF5`d9kL%(Yv@INI@u4HSItYxt)QQ1?*mPb6Y)Q9Ws5z3_1GhX!$6Vu zcp#dq=47lUkRSU!GddrJOQ7a2)04*m?RsvjdgQ0#1sAa9u`gm~`rtH{pz&@EZ^uuX zFgm}LK)N`MEBQ@PXKU1spX=gx;QdEGmk8qdi&Y29tCJJ{MdAzTXLl~d>*c9jub0bR z(;7LrVe+%@dxVkT?%%;}*3-Px)AEMlBbTrPnzlPaKdYI>?h5qs=rkJ2QS#4PnF;* z*nEhFQ(h=R^VKHncS2gp5%16np2Ym~U@ilqJ$9QBk3RZ@G?yz;Ddp1In6$sj{n^uS zI?jrOZb_Uy%Wq5*Ms520t5)w6E)Z9jCrX;kC{=h+ezS7Ek+@Cc=LkC}&$d>Ao++=# z)e;sV*raUQ+`3lae%GOM4Nn$>stSp{p`agIHnDced7|I-;9QV#ENGs%jWBBu~ zDOAvkG&2>I^q-4cb! 
z-Nla)KE=2z|AuV}%ntjWp~romqufI*-4zb@wyQJpX12tWnfT@+DnLN8$F7Qzib{ z^0|m)|D}J?_+7H&qL$p7QGaZ4iC9dh@6KPvh+yle2`l3@gAG&aC-2F=S+9{xDYkby z`OOzBE%Rf#J%yKe%a^=ED&@bpCBB_IBqJ|Mlh><|{72cTwWFd+aM=uU_Jd+hPaf zzx7hIX6N3>!uQ(wsK4EOsIkvT19v3(6t`&+?1D9T!{WjHhjfyMH2*_)S(d;4jMKX{ ztuL2C?ht(MAL9fQ7+^E$su%ssU}vMtVP?WOyyw*DvRu`u6pPZeKFL2(fn^&$rU&IQ33LS;f1dn)GkYW_xkgAJR>{Hj^yk_S zLIppp<3EbkQd@+-#wAN)tOE*6wHSv9=F$w0(hh2CI`sUH?{*;d8j1+akZ_aL;Pz2k z<1EwfM#;c*SXiM!hCsWMmHsq`tDb=Y3%&q)Nz(dq$j?m0VK&J6*Y5$H2(T4G6esJb zvBNndS&3D7Y=A*^`XqKIT0w3%CU;c6kJKaEh}J=&Nkt_m-7VHawmaM|$H=HmQ=!~R zxRK|if15x9P#$|3IVd7}AUk81#TykMj&dY8>~+JkqjlSJKE@jOy7Oo(L*nHd&bzTB zR|ndS>`a=qNWHeZq4e4U^Y)LU=QAsBr(VnR#j)GDbJ1^L;Q*2DYAm|W@KeD4 z?e$b5@vdL@kwBQsTaG2&LoLZ@)YhV;w&6jkSXh~Vw)iMXA{icT$Hn6sWpO1oh_z(1eO(P?w6*vcbr?fF!@CK!VcR|KaBNFrr5kh_{Q(qP_U7&d;GkLj4^JKPl!3`GHwR zMhAriVz(IOhS*ZWZ&<5s%I58U(Q2a_s~9;;^;WP0b34JP%jQe+?cWZ=zd zPo*Z%r0>lFd+N5v4I*1J4p}Px$#ZP{cT%jCglxCb2iI7aw88Ijj{;e1Tl}&PM4pBP z3Z)BH$0G&fu@E){$d&uiApCZ5X#o}bu|7YZ0k%*4UCWb4bO?zX)gU)DuCLZw4s-WT zi(#dvVZmd0-XRAB+~1Z;Wri+Ay&KzPxy0whv-U8I)XomBGnz>+#a=#-n|%JOnYVwa zB+Qu7w+p~;qKlnWq3EC!Y*d>Re>%PPuAK=V^5gc1=7A*b^^XyUs+RrVm+7L`M+Vcu z%Ods)vbAMcXrtr;g5JWh`nocZ-~M zz9U{zEThWd+xI!DxNF}DBCh>azg+qE+E%w7%I))fER#S|ms`(d7hd`HTWmc&Z&_K1 zodX+DD2Kmk=P6VUty{`^d!xet?puJ{s$E!0d2>J!A!p~)q&c#Noh>$>aK=x+Ma5LU zAHV+HxSpqo_~=UeJceUu8gEVfeMbSg)x_kM@fjuVb8=x}kuJ;=$2_J?wUa$cKjPQ3 zy)UhY$<6HAsEu2SU!4gm`!)FS&YxYH7YaE1`D~6)y#?%jTE*AHnEwYT(k`m5*6D3m zzd3rYrh%ccN?0>z%^_{fI>n~A>d>hvZEP}w-p!!Y@o0}@vQ4wnzbw1HwKXYzv`O^w z-Wk><-%YNIzxtFn=VpL^uY;O;qsz~zoWqx;=usa!sjp(68M8iepPvZzUdH41N_Ry&!|q9kLxQn`;K^ zO~=P<{iCR_PxmLUmVa?Gd(1|LeOAx*11k<#t)EGL4Qx#5Bqj2?azXcOHJ_ws407)j zIghtV3p>-adkf|l+F&mPY$oGh)0NH8J_$U}(GVZG4zsjt+SMfxoPWO^ADoRR7WeLh zqLNyD+nuU-nu?Q8R=jtFCM`_7VBK!$@#4x=ZiP3F+RGKrE}Vv#{10gYY?+sI3g5m8 zQ-u9=KG)xV;Lsf*JUP?GJ8sKDZItp4&^=uiRvfkx3dzg|YV~f&Kjm9>iH7|!XnXy2 zrR6%Bu;hUD(&J-p*e`-D*G>P)Rdbs6>%7X~F9(Ob|7UoUx}pD+x$slN#OzZm$Ono0 zP5i{MkxqXK#a|7x9|TKW7wHx{Vhr^X3K3f0PbQ9{Tt$D=yuOB8QAoee-vyXxM)HSQ 
zoe630Z+p&NrPXz@ZA4afKBLXGFy}5Lpbrq*;AcU0ZO6klPTD2kAA3_oiszmOYgX3x z&c7s<$Ps?^574FLK{0^4i|2~(x|Xtyb?)`9oLx&t1Z&1&>#5{kpNBC_Ont1PmT#!P zr)as*hu5xspjF5nyc4-d%<@^gzt6l+{FX}FT4?^krm4bQ!- z^4B6QUB_BElbEIUA4o+1njD^a^rT>{5h%{=x*vQ{X-S9S>}OMDAZrf|3~jqlTO zVqp<8NZbIu%6SCpgK|GiJyv)EFWlSii+9+COdo1+IVFmI z&uiMb8aDV8=pnC~K5RS^xT=SKw6ri0cNu|tpVqqHkN?Egt?k}40##zSkFo!%+O3!G z(X3jw#MTHGcG0&A5wXiyCohP!v1zlxRXLuP>}gg;k(*sR*V2?q9-2h9LtH+Fd5-6~ zn62pi3u;@h{sc43ryE--zngo>Us+J^l$4fW|6=znpW%JxxqaO@Ug*?~4{v*LeGfd0 z{4fF(PpWRI*6Vyck&|1?31a${_N9QehbBUD#EUJ5t-@dKjl~vjNz#`(F~f=6J|=hW zhnPsQ$AM87R`_%kFO{zCS0(W2m=L(LmbCml9`QoLHfK&Hf~UFq;&pHKN{JusdM|~n zZWd`JklavtWKFeKs=xT8!YlFc^bb68X5Fc)z|=0Q1FK>Z2lGR*MNsTdw3*>uTlGjA zD@6}OAM}MjGS0SE4Bw7_fSUAlu|WIEvP^uhbjI(uP?nNph7^CW1XqlcpX7ht+UfJr z(2V+&d+)%9J=m8b#gao%Jp&z|(-7#PQl!&7hd+F3vvRXM$U4toDjVNEaHpB{$QoI~ z@3-Gz19xRyl^F(!J#ke;V41y4XefG}q^VNGM)di&cg9TKK7)!xcktp{;%%!x`9ir+ zK#c6pb3p$fNbFC&%t+x+dBJQmwo`eF?axdm{{W^B(#4**-#;O%m2zk zaFIxnatGGg^ldTQz5m|g5NpDBS2kA*7BT)?xtNO>fS`j5(-4gRwJ!bNR;Iy)=@4*b zdW8tCVRG`nSEj*bY-u97g9oFhzpq^-=-^hUKp^Zuo zzs1)y*8k@}H%WB^7^-8xHgaIaEi!7PSnieueHMHQYMR_e+E26eObVQsXxm(6IgChF zVll#?_QC0JPDV(&Dg_Zj6UP0h!imk?$MAP~-q)hIiR_2N9Zg7pGU&t`!b&EyG`wx| zlZ3Xs2xfvH4e{pVCZ#e@+S`=2mz<_zB9uLChjt&wfz8z*xLQ!Cc%_Y<7@5O+yj>Ep zuw44dUnD{d@7wx%HFE9bA_B+bxw-kt!uJuj+TX$m$!_S~$@(7mYRXQe@H(%Qh0RXP zP~Pj8X;9*y%8LfgDM5y6I=d*-IKYpu3!@kqHlyC&%L_D9UP9 zb@)|m(+%iwVO&s0-v)R;>h|#Vuu+p8i|}ofq7t%(x z3xfJ6Bf{Wlo7<7Kvm}}xxf~0hypQ91ArbCX@X&sJqPE`b;!hlZVg(t|EhzOW)!9Q; zf(Q+9tm?C`{xTox6fMNdEwzB?F<+UW6K$7u548MN*O2}^m))A{^~WN;Bo+2-1(znh zPQ4+pODQ=d42Ea_PUrB2ZEbRJi~Mi9Xf+iLjAU~o((3Bl_Bv)=d8+qf)C^sEOnEg{ zj4xkDW$N~A`m@?nVPzO6twNEexz1TFaz!!$3@aTtYM(1Fu()t&can!^RFR#Zf0oF? z(L>{8y6#$G$=lF5n`q1`vW7ZRj;T7iUABFu&7lpNzE^5w6}%cdE5s&dVVW;2Y)oHb zoKm;b$5i*uGL^^?huz2aY3aHV)hfsibI7c?ZDPpA^{@dZ~FzdZ42b41#;AYD0P&3t8~?UF1?J41nw^pPI5qGOTR-c$((Y zk9uClC)fpd_pi$MbLwD_&(~g%M5-qLif-ro2f*`A&1=jK{-h)wVa1%|C{aE^kThJ? 
zeT_mdEFKPovm;*I+cu9VSI~+|_;3#@o{mn7X*6oJ{B+eNy^f z;*O;al|}snP$s)`pj#nz?TgrY>dDO(IBMJjjTBOEzrN8{;?-u&Uf-R;@0{0!v5tOF zs7qg_OqMNTjXZ|W;`Jxg7KeD71;o*78e*<-;haKQ)r4T^bdrbf&GY?Sbsk>24rC~q zV-Q57V4?+{tB3D5P+iN>Ozx(%6=4ncz`w?th2s!0%1z3+Mv#!*#JIS)Bp1|N(EG^Z z;P#dC_rnnK#=}0; zoUr@?p}C7uI;9yRfQZzJw-ux}rp}voQ2p(RT@yJZRNi4##A@f3mFz)Mb{v;XvFUAiZFVHQ;;Tn>@TcL%U zXNz=CdCTWWnJ1@PwO9TYXk()i84h|>1Y7W%rs*ceUL+zDUO`XFN`kTC;JswsnXa5B zqDQCPz|psq#+jhQN*|SK@QP5!D+zxhR}|0Zl2Y85AD$qkDdLjtUB$kM(k57iUx`0j zs1G}hWLZ=w{Rhp5GAVY2drHQO2wjd6tE~hm4 ziTw)ik-J!XI0iGR7lANNr20px{hT`7fs_qmUPD^DP(8eFf)5w^9NWFLEgI{o2XOXx z9&OSIR`GN_uXgu=i0ulJP6NPeWAz};m@ujm5~VC8rEhuPYqm#`T9p$}9irE8XUMRQ zJO02}ruNI$qA5M9Usc}&`Grsmw8n_QNLGXa#Rq_u0@g*ct;!lug{VJ#RM%mhvk0-E zUlL>Sm5_5<4_`ORD}30Vgdw7^&K__nr1Y*Nh~ohq&&+>5!Z`LAee%HqCH&r5y8@js zQk+Q{AwuWLUdPEO15b?BT>_jEl7}LSbE9aPFv$5@0+g5=6BOQD$yfBx_&a2uu^h=n zh)%Z8F~;HqhfIi?6k1e08v7EvO*4>$7c&^e^&nZXIU@Rqp4hZIG)m4RBx!4iFD?8{ zT*^5{##e{5^RV0|qOba&>q@BvdaBAa+{)gUcSm!&Tf;r%sJOd$#nRf@etWmu9O_!` z3?eBLO}_1%i}n#l_|b1i)e(?v>R21PRLZj+t~C(IrX~577scak`#pdr>PItHViNtp z^s$3qD422Vw1mBRM)+D!Q>@`b%dSvZ+k>dkMO-hAZo2sW(i9Au#Mm+u`lul$yZ{D= zN>GPN7%lIJ|Mc^3Uvh6B^2YTGVYhRA0gF87+%DMGW^fV*6y~!()#o|RQCw%MqmiiI zcj}2ndyjpNUVXf?OC)I0`|N|o`YjaAWKq0?>!$Nj|3w6R$T=suC}MS{=mLXeQUsmh z>9if%Z#3=^nhJMz0%OMkrD>qTIU0|V5>F9dAwmqu*r+Q#rXUb1(6Ej?6)cW(ay?kl zb8vT90fJq^*23?#KIeT~XYOcgWjB`ZeZ{P}T}_}rIK}!Na4sy+$|TYAhx0(Fh}nr` z!}kjv$~cL6V_F-I7-_V^hg2Sjc`K%LJ*AdRi3#7#j7xoyN4Z3vToqE*T++gXht#>1 z-FN`elManV(^F>@3+ILZc4;FYsB|=tY#xr52z*BMQ%_F6f%29^#+BZmVZ?W`}fIn2_|xWh+mTU9{~_BfLHI!(sg3J&;@ zVM0j}8;n&>2$TQYRka#Lk||&zz8f`{hucXI3uAo|eqk#QrNgE{Lt{dtL|-Ru;~l|d zyk#^f$!3l+*sZhMuhq#|5o;)a%40GG8N%+zqjlU0Cy}SUN4tlgc7$~YiEaQWT8945 zk&k%4uP~UHSPCL~zD(-|7psvV4Kl_Zw|`=hh%?q9cO9L~XBhzYmMA*7pjTDG%cp)57W#ON(jvc9v<%bM-dF$dMHhJzLKPBe25-O{tS@<<7-; z(^4M2AkHZnV~5nmeA1H@ukail*l2yYmIFqaso7${<94Hlx zggsK8F}N>hM?(xw3E*KW#l_b~G7*LqWB1o}AL|$*TLvZZIF>)l>X}cd%7Ql@^6E|5 z$S~7%ApIxM5%o1l!yAQ|7+#0RMEvO1*|8j{`No&s 
z1WCHyaK6mN9_Vnq^}4%)R=kC$#G7<0(hp#r*I-Y=2C@?h;su3%Osa+Q7)8$R_SJTH zJmhyzQP>eR#cYNIg9Fc?VYJqoYY54T;CT0VnM$~k@tT>)r@pZYw`zXb1;&to`0d?7 zH?QstZainnB#V~oZ+aZ&jhQW*j_vGIVJgtx?m!sctQv$2Z(e2`FTnxroR zzcf5O?)fXicC8>^-!5vcg~fZ{kl@16(cd^yVN%PiG3rFUaZ1QcV$I|IS>4=2uo!`k zB}8-0{X6oY9!X0?*&Foe%Pg+L3&#q9Y%c<$?qEWhY%X*gXRKElTDZGHI3e9t__FP0 z2NN0bm<4_u#=@}*mgxoDnTIzI+HWhPJ;2RBD_!469cc2J6=k;hg2@%sJTdt z!m;3wHVT^f2~L9RHC~wCnw9}BKlg*zTKE~$9E(YV54%}VxTdWsk#e@3M|-i4~v2|k(7 zo)w%P8UASB?k7jIe2X;P-3LQeU*y7BC21|+JytJYg%k)XSfTo2Y-Ou9&?t}Xa43M8H0Z}cECy(SDcMl6kv1}r~a{91{dee5zfq|rOTSRwyWnsOF~NN zAs@pJn09lUr=a}e=IhTx@z0Ywdg6sSBzH9)S?p339U3EntqHW(Kk*Xq4t0cSc7J)v zfDg9bgmh{f>inY1WX{F{JW!a7-YV5`VQud0R7hgRy@+yQiiwmMbEg|&s3>#i#s+qB z+0!kFqMG>E&b0(J{&dsn3D+Vr+iT@$f0)xeq5GVrgFmAd`0LA=e_qS!G!m@*Fu^hX zfDsMP*P&w%E9ttH>q27%yi&7kqhH&+Y#=Up>c|irEGjWVkhp6V;2}i0jS~dFB!o>d zt0%!^E(UON19tl{mn4SB>V|R|*uU}MopYO*Qj!TEi(dVKXUGrCP;(S~ey(nz{7S=e z1(xn5`sx$w@oK3R?Jgi=uwT0GgEMInN;nz#aeduo z7#pm_v@Rmf-=H>-{O5f^$r#Rx7z382h;hj=3Jk-Us*%Sm@>i(TzfRnUMrlrJ~_>wD7MXeg58h z%c)M$bhx<-A^M3(USm0wrzu9G0`Kz&^yye0Op5vU#2WO*nq`bzbSTYUIzs382ZptY zOy4TH&{Qyf`GqL#8FN$|689n>-77kmj(;ANp-6`Z#RS)&KZE>ygGORdg#YyprvXak zI#BE&+1ADz(lOSM#!FD}AV+L)X{a*TgI-2>e8^ifb?r1{B;*$?UMm;#1j`CVJN-+u z{mpwhOPXF+k&U6>0Y5#kZ*us{G9G_W)3GpZdf{M%W=2W8(jy`Q^shi|tr)sxvK?NU zB`7fS4JVSqA$Sf<;t4&BRAd`;J&tB{idTohDtVE`zDx+8y$bURRPi}+MY%>{Z;n$s zcJD17fUdDzgdU=|exJmyQj6(YVDrGtaaDjIKruKPA)0+Ww0J6Y8j~_gyugNpGm?2S zBNQ}P5^8v=h~xnN(>)iL?FM&|F57k@@8K>-T8DUU=v|v9a_aG(y@|#bFXFYUJY}nL zmo^v|?m$OYB$Y!C^pMmoaV0t3UA>JpjKG(E@U4-xZyD|d%UN}aoJGh1*94Z%9P^5< z`-qcVR}m>H$O44{?j|?RIas08j3&U^NgPU5-X_k5hwHTWczsLU#$pWIG36oFofeCx z;-((7Uk3IdFqnpry1@Y92H8hD_lZFXVaJO0EJUk&EO#^!IxAmb?Eaqe*1$Izh;{ zuSY0F%zlM^$B(kJ0>Q|Nb1e2zWrRm4tUCmWMk}-*X5iwzoXCQQA5${~1nbj5i3{D3 z*&%gTD^3sTHG_dFc(k-}B-wPg(-aaBkA{sWn|YCdO+T>6t`es5G~POWOU{FQkm*?y zE)8GhnF0#!4aR`UkOo#C!Z$X7D7{_(c+iGGU%-b0E#+LiC=VM8ib^1g1_JaII0d+t zE=YJ=7?ZR)qeo#jBjN=i)>Ylr4c3~cXO{8>4d{)Eax|D~7|rHG?AS&U5&QJ83WOT- 
z*w3CCCn5zUKGZML3iF|Ym_~Bc0$K>_2%9EAKRY7k0mg*gG8-~pLbld9?iNnQ z;e1k~bQp&SrBe*o6>`0Pl)%4=nhF_0&k;*V^wu$`L^?p<7?q?jXDP;XcPE8Y>ifYY zLuiTse+8RUe%?^P?Kzb{igK`@P>D=Hb<89tsBQtt&nGvv5Dp*m$140`IB&U3uL(ZP zb{_oG&Um`rz=BTnr)34I!u=Ts^9*lZG=LOS6sNoRJ#t1vjooC;OM)4%%j9KGpv#Da zn6DL2q5+?B7pjP7aEcl167%&gVR<#q?SaH?WY#;8VZ7m58V%HkZzS;IkJME~TM&My2bV`<$zu3)dn8E?vpaq+l4PQbA{b#}fOiHT|idR@%?I7&$@+;dRf_rb+TV z+lq6hCf|^iC5E6Y9sdJ>&2;6;JttrMxT4%eW7l4bzlaGgvP%pW?&@svl{EufLqwt% z6beAHO~i;?UzfExT8@F)n_$-Ze_qlVy0o}?=l=3ab3(-Lm)XghR`jd364C{2mfVv( z(=gl_Be@QE9^kp9L=f^5OC4P0Gv7fCsYu?FIWEM*(O#SI?&a_&4VFN|jJHn}@pJR(H8qDDJw1;wO^Dtt z>x!3FzKEX{fP1~WEM)A>q6ot8wiz)$$9n=^IJb4)FAB7F6q2nd+_6*jlviUmG8cICE z`Q0M=3n72X($J(eg#gcB0B^eP4-v<==bV_`b{E=&4_|a1rjmD0p=c-Ckhej{@c)99 ztW~Z@d+&oMJ+mkkTG2(AK5gnPdPSKQ0gE@5rh{SM#y$`(?xMSTpJ-y#SUNZELR?;=sZE4%j7_ID3T&89#Xr_ zXy5PnB;^vm%%wcL&iMzhuz7m>Woc1*SN+BT%zlr4xt_()5kW>NGr_i>+3Xj3nzb9c z`{mQe?Bo%zr}F-TO`6m5lr_4;cwW)f1#4bp69fvPS!Y|V0ZIP#FZr@7>J$AlzLc|Ukgyf!yEg@=tjI82ppx%AzroPXLan>d1 z?8d@?6QIqp(QL@1ne``$U2u;|&zBDAowv&t-sG3Hjok@JM5&)E{)t*eG*2ba=}fb# z|B7}yZ5S;$ZdVUUWWJ$i$?lL|jqV)3N^}alBdK$&-KprL_`O5EY3aG7UxC% z)}F5AW7(u*3HDfL3sLpBCvFt<2p#W~4+_T)>%HR>>u>!Bi1*SyBf6k4{C=J9nt!Yy zxkS|p18b!`dCZl_ukhkmqc@*5)$h2kd-t|s0YhQ$=~-58%Jg{z@kLMm0fN8hRL@_9 z;0G-!aKF1w>YKa&7){giB(pD{KzRF`+2|qB<8?H_emlFmz94`_YBn zn)yta@A+pFof$ur{nf@JvQMN2>6CuHwF@}yKZt*RIpVLM6EVxmf0CDfaVHYaZ+}8u zJR?bswQ<5h)u;ZLgC@`_X;;^FQ*_UbA?nbgud4oMzZQjVr&5rnO2P~04*{sR?@$+u z`4u?2n`(x!56k(WlP)j$9+S12TVh;#Semv^%e^xneo7* zDGl@W=m?z$KO+q3($gQH8Z9O1PYhr5?zIXnX0^oaKcR}Q;k6b|(ACK^wff+}L1o%` zQXOW%Aw;&k>D4=Sg^}CWJyKjiu}J{q8T|4)@GVrKZK%@7dgQ*bMp%vN(Q+xVY-7z> z-kF5TMQq(hV@*xnQJC4y`k&w($af7R^{lIqM`xt`+23v$ctl$VVgqKl&S7?W9Uv5Q!Mtt8|}N# zD_yUgiD=m_qXohRV}5;D)7Z0%IEkRiFq?YpNFNh~S4kQEBH>KvfYIc8wPU+HD{h{B zIoJtPgFCcu?B>a{Ff^?(QV&mi^tWJ!LCAczzeZGcrJTE*X>i4dJmRrcb1srNJst$H z?ME7sw>F=j#_n%SLr7Yc$6JYKTMu= z^(;@~hnp?ZT_2qA1hgGL9mrYrOP6mI?{uDH>j@I*6le}OqyKR>c1cLm=Jg`TujhTE zVH8bEDd)a~bhP!z_e`&4LiziR^i?`Hk&va_+{K>CZc&AMnLBOae*gjM5i7i~ZrNR) 
zg-?1aFSc@YK8;`_{fRHXaUrNPzwi4gGNLl{g>tW{G`cw}^Yw{8)=NB=9&^d$ zZxybO9}*xiVL*X+c)@_8waD~&{X#qS8=x#>S)a}R@8LvQ}1e{dm7 zAMz))yzgNVU4TWKw`()Yn#dVB>aJ@m07Q@Cz<;Kmv+XZk;zTLgdq?0|c`)xs%wYTY zd@xnA(yO6hs36X8BBpEsH`jk@^N5+zADh+~9C>iCiOWN}7ULK8snZ}_R>vKqEUe~h z+SAi(vl+|P7mayg)8jIAR#nmMW4KY!Cr>ISS7^lvDEFwHlMuh~Ulwc~kM;$=lXl@C z+5W1ypVJI3#!Y88XLX&E-l`g#)dvh|%m=dH$r@(m-zj{1%)x*DVsN7f+=?^pPk8(x ze|0$_LS4vdQ&@CqlR8Q9goakHW&L386`#}mC8tCs$#R2q^;>h_yxb_nsAurJ3Ej+F zT*CEw*M9)lOOrO*`T%mOubvp>b*Zrpz2pmdmY*`GP?}E*xj9~YPexQ}R>rBBW$?#X zVcRtkpRyYE*8BNQl+`NwsMxl(u zgv~iLa@6YXWbD^_8PMdE_T_E>?^e#4?)duu17kp(zm)6F-LqR% zMkF@5obDYCVV7`O>1dj9a=-Q(#1EKtFjsoN1h7H4#|M~9TgUETiA80Z9X!G{5t<|p zUJy+nacOq!K&l0L9wXLcGh-dh-PL5NDaqPh{J?=gpeEI2OkjlAEvTwdvI3S{p0Qnq zCB>Hjt@b|<5z<*{l``^ao|A1<|?O@h2IkD z8@?u`D0|CrA)gFhS4cF!IBmLm)@Cn*aYj_aVJ{(l>b|j=51 znPMu30EDx4BCZ1M?n%_mW5n#1Lh4oLaYT(nZF}CmK&S_#MNaB9&v@FLwjsO;Rgbup zJ78?eaQo{s08a1UUBxKGhYk5Tlq@S(w{~;K+&QY|l`4zh!rO@@zfpRko=h#tF_iIkhlwH8n zYj}M-d4pT&H0S^TVSAP)uUaNnR6KO}f;5_b;sqO*H+>sVaitknJ$;4@l%c?8oa1ntq*EP8(HeZZSfBaq+_@eCKrRXQK<2BmP2CCV_itW zTj)%N2aE89Oc9H8IhNZ*M?1WGN3>SBt zMVxpawpVW0Ck9GAIh4(iEtCi`U~$Oa;#l8S9;Gm4xq*C-?}%-p#g1<-2EE3nE&Opo;)8pNQT8J)_H24!s&osGe7hBPJmOSN#^ zt#pE&V60tIyQ4fO<`@q#CEFhDngP{~IC!nEin#DOki0dv~TU%OSO?!nn zFdHwCLpm&!XegzlCCML|W+8?ZuF}MYfqHsH(F$qCrz6xsY)zt+X(Bh7id#9Pp}ims zRm30&uOU=43WjjZ#fWLbSo|dDiY^#na${TR0a*A3A+5?C^B#;;9Lx;Dd&H=)Fr&0w z?s2@%S-*WEw9BnvUe*2)Tg}VY+(hdO^4k4QxWueu?j4zZ=Si!mvvcG3J+u43lq_fy z`gxbPZCZ@ec16Jj>Zd>0@4y_rr8TzR=p3JjfS;1U_!q<+$z2t{#oglwti^O#e$_gX zkeh2;?j6vKa8xs(hORb9Fsj_8E^&*+B$?b zrf{Z*v2of02t|YoTkllI!_5)=5THQHjrJW?MzSUD&3`dO|D4D|WCpYrM{Lv4igACV&l5LqT?#>4@-9SsgJ`1$Rz$)1w-gENy+f zpwcK;AcL=|*XQ+rhBT`?BLq#TY~}@NcsPS~x0B`#o*R(Pb$(e^=3w6tTQG>5xr|HZ z9r>Hh5HxWLxlo|&V>KQK=X5Uw_c9MB_Z5GsQn%E#{J*&kW>+_;z1@&?V|8NI2#Y23 zIo0q=&<+5g9yhooi@I5r9343;6Dhh>4)8a!j<<-NfS!ewPfNLKN@VJ*3iZNkjKW3L zlec($%S20FEWu>1@JF-t9dN(r1h*G(@6!I^<^(7KS#Z1-rV`wsHcrCjAy#*z#M2@u!?iM 
z{fry}UPQ(W^H9Jw}|SLroh)oCAolv&}+P>sUPrcOJm6Rg#ux{i&KUZ4pygZJsI>stmN5D~7q5 z=vkv(9Qm0cO3$E5F@-3Wd@;EcDy?aSu-ehXF-D- zxGix=+{8pyA-O^G67-NGy#CC_v&7st9xUa4Nr8;q+A%WJO*#l*cwHB^fa`l1X)dX6ryP(7g{anQ0}4Z2CI;z-w#XF1k0=61Dqn!zG-6^y;Tba zF8F^9MMjxLM%O{q@5fEb0%2WHF1szjekF~!#CMngrxOsd6{=sauqdEg0uWKQ6)15F zrx_)-QrV_ijgx>qPM@D?Z>sauQTxwF64E1FhK~ab(oyaz>jz!ob%G!ZRhD~ZGp9Fq z-2Tv7y&?*j%i;o*sjfx8Y+b|DyAdBU^G{f=XUD`t%y#Lg>{4+V|gWVFx_yY3zY+SBWaWo3g8{5sYu)&cBtaY!mFVZ z2p?c7(eKIZLeP;49b8!S2WTK_mqM!LMFR4maJIxWQq(!6EZr-e!#`rr#M6k^7GMle z+LYMEFj(=sK*wu2bBS|?+tfdxl&*=HmxXIrpu_^Xaa=SFICaOkD&`b5LN3z}1Ds3( z-{epgX7-w1JC&@IP`xr*BDbB62j!@sWiMk|-Ll_WHyl9NG^J=Ybnz1kWg4*MIzE76 zwAaTB&6~T+9c;|XMvRrWLfpK2x@0wzpK{O&-AjU{Fik2@Z}IM3UL~=xRoeBQ^0CWw zmXH}%Yk`Q0n5m)AF ztL&9b={{3-A_~cMmotknzCXm~Y%>um0cJG~Y-X5kGz0mHqw0!>Kj??ocM~m#>>NF5 zZ>o<#lsWqk#C)Ghtt#}#C$dzr;qd6MH}Ulr3t$@qZMa7jE$YgUwkVailfy5cC4b$z zEw@XK(Uxrsa3X;N#&j3NOkTQ;u?$eRMTweQjZc~flfd!(V0ASlwqD=wBz}y^)apGM zddYH);xAQ7WyKPctQ4erkZexz_iap7SU4;=I0)6*5|zhDI+n$1)yu6m;nUV%uob%B zvyxilA9)GSH8bP1zG1i>B^=csnxY{A#t%h)r6|KUxi`>$!RkTC6hGf-Z_+;2hYCxRS*aL93W@SfYic zoDElZFl@Z!HiJs8bJe)0u!%!!0Q;$f+C5vcla9mb3tT*xz@I-504zd)D7(nIu0f%o_`dK#Or$!#r2qLlBW-%hAh;V4cmqe~Dhp z!Kc=vfEYC%u8-6ZM&fK<^8H*ShdICt#&Z-Tvrgd(U5lpAh~U*K#JwwLe=x!6I%AiJ zzGg{)-6>ZJ4NcWXAjoEDWiIjcFvFYrh5OcqCubd|SapKG+%&pQi1l@rnBsS0j^L(P zoPUKaQ#UP%6m*EJKXBX#z_r|A%*EUFoBD%g4X^D&8I2|cYv&h}M3=&Wr>pfxRjBz+Z$9A~gWL8rJqP%I_RrBLk@B&7JX-*6A z_JhA9*_{sjbHz+VLe8j8yZPK5v>|AJ5}u_MbzdSKXuwsC8X)O<8ypI(ZN*g~ zte7{_IB$Y99l?K5O7hq7$b)EwTiIBBVu*kNX1A#1=h4fxS}`@f?Ok2t68_i)z^Xv_ za_w;eFlFU~5{f}A(%9p#eN`!RrWSXBn}`LaUKg^W*SfJK(qOwwT_J`5t6ag5luqX? 
zppjo_;K;JN_&Sw{4<^-hj_}qdj$Z~$<7g<%H0aPJ8}V^hqd6t)(E?}kr^6r|$BdV1JhaVy=V=YN{TCi00QS{K6}JqKIhZJa8VnqU?kr@@ESaMKmxjvSK>p3aflYFna8ljm-LOHf z96U8!_1KYDHz@Kh?|<8yG0!mY>EEO&Y|bx4zepOV_fsR7?-9Vky%igG%tBkivIkF| z{rF-u3=6{d6}7Q30d%ZdH@!hOa}|k^cD42^_Z__6ZamfHmybc`_#*K-%jYq9GcLK8 z&D?3ZZ&7Q~zi`)5*$M&kVG`K1=!afo5_WI zjC3pq&Lx~sl7aNH(o?!p3$`x3W<){C+=W_DvCBuWSPmM_axLqKD!WXjs0|gioQUrk zgIk4RifA@vU^UY)cB8Cmp|^o1=z)!Kisyx z;vg)V1TF%QIk@TBGeHS0OBLFHop3Qh!G$p3!0JDiy%QXTc7lOxX-nUUm&}Vgs$HhB zT_OhCVi8@_xO1_J6TUBXGH1c=d9{<<)KQIMgIt;RA)iM>282d$ytJJq`2{&jXoGn<0s5=$%ax+$pH2XK`mSp`K@M z;UlM+ex)aOK80>1*#7{ev`eRD3FsH@P_BmlRi*;dg&zhbkZz3livf2JFwZgZ#Ld5~ zL!L7PEeGZ^9%be^^8+D36=zjVvrCq#%t0TD^T{^A#kNw(T`opo*icp8J;GEmf3c4H zgO;4_hC(2<0)vMMNYs(EETm8`-s!J0W6N!sct_Q96i1z zQ=+uXrEtKiiiHqWjx)SpBy`NRYPLZ+YmM;&FbvJQ{C4Iq3xI11uPKK*?=n{}5oH=_ z$96LbXH6tJO!372VYnijL2r2E=Z4Ps4htA z>9AKgWw|WK7GTiAp$0%r=|vc^w>JphR4OnsI}(x@G8KgfBWe=C%(ZDEg~4NOGS_}A zhqzIGC1ktYWlEt z(3Lq1``@?;c5voZ z(&_xtjE;%K@TX-dIKuOtjo=1_=d6L|a#@#JqY0 z0e~eakhSPqxGJ6k)Qp_cXLRII6>!;`s`%_*I@4y3Ieg(|x9 zo@J0;tJd;Oxy~z%LDprhBKB-TQd^U&PV!+@)^kWBe*=kygHYh z!&}pq>$;abx44zfHV|FB_~KQ;eqjgI@WdwL7U0an=z{Z??rkhO#oSxh;&YmVfs0(l z>gUAY)Aum*e<%ybScB#k0*wwjktK(qSs0PHC>#Zpl~lU#F2UReT9)MIm0UN%IAkty zMOh(+Dgvp2{kr7jause_X7dRO^jI3ZdA_S0A@!DDQoM+viVagj(#KPDnyFnl9fwVD zOSDSPg$qFEy8;*xr3I)niVMaD32NcZs4-09)oVtWRD_ZiscQ8jm`pbXJbF>~mTZBE zR4_0dSZ;|3i7uLsD{QyQj$wMORH4Hmkz}^QCIS zfZ26?LmHX}a8-twF35w5WQZt2&{e~N2$Oq)FCew4IZ$9=Fe?ynunure_zTJrx29Rpnv>n}JCjtjMk=*=Ec>X7<32me_VQMT0t;5y2kv zw(DfaZK9yIoRDBJ2BWG)bjMhW3h|momb+j&*hWAkt21qv)Ni93 zxmAi+P~aStqK#c$v3qMN5Fv=UK(!dIFg2A5?pGpqiMbj)LkJovIxeNutxU^Nm0Kb1 zswI-xDD|_)L&T$CQ5>#?Z@!p{vn=T|(%T@g^*VTi(`!d0s!(nQoWRA#uoAlXd{Dm* zyvmFX(+66(>OGi2mH**WqaBo6QQ;Dg<2|5lLAd6E&yw?qu;; zT}t!N^;8@RXj`V`Mu|zigIX-E(?PA6b|^n`l4)UtI$QC=sLu#dP*e4Morz(z>w~!s#ILx{mezgTQ295TYA)&H4%}3bAm%KE+g2L$z zmlu4yQE7G+vapilQJYpxco8po(^t8Sr1b@&-WWv{xOV@k&%;u)@3T@<0s3gTRI`<>cWz%d-QXMN(fc`!T42I|C(F4~PJpkyI{_c;j3` 
zjitz^3ovk2p_HXfXu@SHy)`cY(#-AObImSuF-s|^bcOR(vpayZ!uO3^_{kH?VNpB3 zh039nQMY2%i_Ko*L#5P2X<~4}&c3>18;xv4Em5}x+fFeGu!e?#rm@MA$j+to_lzW=4iw)Kw1T>KytCDCGdwr8)ZBtOh3Vt_2t9!3F#MM!4o=) z-Gz`VgM`E7m>i=DS|uSA%1sMm*%Yo#4WW^~SHTFd6=s_(!fctYVExtSqUrc|bC|?6 zg7>TKk?-1)8ajg@o6dJ>Z#N$ds4)&#&FQnD6(01OSEiV?;>2Yt`7w>)ltb ziAz}1hp!Qn)%Fkx2se&Cp^0`t?y*-7d~xVcFNH4pALq1e4?&2?+Bq52BmV$S@!i~a zEUGuSZq^OfnZt{30e9P^u9|K<{{XUiz<+)rX^hM>W6&?;67Ml<4Z(DS(q4BloMJqO z++L(#h^_wI#AN>MDom{=&e|7k#fFT|ueb6+r*s@*ioLZd%!f1VZjwDQdmU+3&5mrCan$^sO+Z1#}JxeXL+e8Vk+T)7?>b?BE}#9BR>`U-?Erig|8k6pry`c%2UYDKqUu51319jYL(rIN7@?6 z83a4=05hcmS9XV`l_1E~W|rvSb*%M+p=z4`O%J%1NUi`~6jgXw7|Tt?t7pB!##2}* zs&Si^bPCxv8y^I&t(2`--%pZS3|c6g>JQBXa8H|0J(~9sO=})|U6MCm#-%Eb>?PJP zzJwUXtfz-hreC3-XtLIEqr7k_VSoWf6-S-?+%jmuS79kUQPk0Gr+Rsm!v?hLqYbla zlA#z=2_78(05aGba+UIi`oGfV@CTibxD6iTB)OGuf653I=Si7oGb^cinOh*IE~No{ zM%K(?DhCi$#KsQ*>E1IHu2?k-IMl+_<~IyO(k6$qGuEXmVcOuR3;V-^qdUQd=7_+z zNulS%vJ+PJP}gh1-m{&@+>a*>SeQurh)=s1gx%bdsq0j>Wi$Dj+F^1EPz(j zkU?R268L45B-|iq7QhseQ*m z(dpcbRdlhxA8I|BZXTXx=DC#m=4!Dig&${sKX46&K%hDYNGdp~=qvC;d3Vd+Dk9VV zNQ3D>Ag$MpO(x>2ueh;t@LkO=q9*?U+nK@s?E;{-&K{v!46kD|UE^a=MUWt7fYxl3 zvlFiBUcE=0qX+3Lw7bdKI)d0^Tp|kNAOss^60ld80NLEAq27yLk(}{Vw~J>@8GA=x zE!NXR00B;0>Y55tsduImZ+R~SQ?Tq4A5r_*l~+KiUQ8f$QA?y|^n3w&L1bV%!6dz8 zG13FNnMqG;K~^ZmZeLAjZFA96jY}gUU>5KR#q8AH?Nh52yb|xE%#5LdI#rRrl8&sY zMO>F+bA*;0TLEK6R9$e?0P3hs9uJ&oJcy4N!H-hq*eh7F> z%#kjevtDAx8F5{T?8`3v&{Kl2!J*6mi?f%?SIlZ4q85F_o3ZO>Tw*E>xYb>bMexIO zU@%xqca3`G=OL4Pn_O>=NmBJbINXe%_wWPnZ&&-=B@dY7x~W39ucU zKBIE~0PJEusN)ZWMNoZ5;$Pe}%Ud^N^zJ(@;8dmG62;zGdFP0M9pk|+^BOv_AS%k` zU}teWuy{KJ&H9RJTDhzpp{H`ZqsPmC5c_C>cPCJEF*q>y6JCeB#)=^F;C%59eoh$| zi?8M_N-zg4I7I6%{uq=fqSA!|(AF@e!vOoD%VwJ&(>op#g$$7LuOr(eLwEty{K%PO#pT#8JuX=y@yR6)GokM%4z7Tl_=d!@l0pKyid) zl(~H65Q`L!w_vKW*rmlnr@xsJrMpqeizY$1@=Of0*a^W!v7(+d>W-py5hHm-rE2zP zge~PjKtm>oYe}rcg|M^5k+TB;)F-(Xqja>Vy^{9h1*TQ8M}-**3rkKv5T%H_fmQ`8 z)AcJR5QIHS-?AN?KpGyzdT}ZMHhEV608Av+BN?0m#P2s3!9#7cOQy3380Yg@6-#dM zFy}5v#iO1CRr8pF-%(z+6dbz+XW;_JKw$W-O( 
z+0S;+!xlH}+Ugk2IghF-mV@O>g3d<2qNz;~T3p2>gOn6j}IkP)mY z4j7cv6D%VTcV0b`o4H8TTs{a+1y1XuyXf@-B7sU(Kj_9_D*}fdqrwFhRk-A14px!6 z2T9Iz+8LL^AF;xHOX5LkOQEJNsJL^Cadu00mU71Vf+easXGN??cf1vb#W@6cFEkuh z4Ve;CuQpu))4s^Z6OI1>d2s3Th-9ZD*O*@D>LJ${&}`=y6{{-U6fp}$lbXgz1+^1x z?P85fLB0OS?HQ(e>jU_3KLUy*} zw`oYhhR;I&ocP`a01W|v=?5m6mN{P@@{{m}p!m6|19RISK_3Nmj&Jcdhf|o_67KmW z;>^g#d^Rb1BEW-hSe*M<4(m9^)9h+S*Q=8zeEQs2r!{*-LDDa zILOMO?yehJ^b#~!uXV8nG(A;7Bi1aVLwUSZ*VXuU8kkpl)S-o$pjygg0b$DUAxjJe z;qx64R(c_>zKg&XUEF{`Mq|sr)`iP-JYihm=TfMe(V~_BZKC)3givw;;SwJ<^)$Cx z;qL_1q6ghsjcx`UH_}0doafFZuVrv22JL&0STga>DuOR&77{Lso!bc4vKN!kr6tJ1 zg;#pV$pC7}$exioBCAN}LFGUK+!bHwzep&bEB^lg$ig#;lP{}L>ltbHA@a|VfXFKg z2i#wnIA9lMvWh3fN}Uc>_42nO(oP016FH0Uo(;GLB{Z{>4MlZ0Wh)P^0BZXSypPkF|p-0?g z_=$Rj*Xv&6<^5s9~< zZ=opiFit=MpLZ?0so5gD3a!D?>35+x%Rfo1n8H`3q-kcI=q6; zTlIPdn`Oz;9F|jz^%}RwF^b$2LCu$uu`rV&tfK)4Ro4>jPOZXd4Z(DcHj8#u*+Br2 zUsx`|fo{b1@(@eFZP--MuCt6w9x(iWsKxoS?dmV#&xnMi)>l`+{zqZ?vA3x%%!PqM0ksgY?Ynq zP#*#8bRrv6Fy5!qm!NSiC-9aqX?Aonxw%|)B}sI;jzc~m0+k?#Ej4@Q^Lv^kpQt4V zWgVqQ1CUR);NNSf#-X@x1MH1Jjqk^(FxNgI?ggQ(l;p^{=QS?Hn{d^KL4g}iCD0tK zA0{1et(*&vOv;!%@%ke*8&-ljzwek(IO4|3?=Y*xvqH0EG(n8&ASbZp^1ui6VSXl6 z_1(kyKM}bcE(fxoxlpcFD{AVT{lVifRUe&vMQujQ)mCfNNnwT6Jlgz1O5-7TM;~Oe z5O)Wh;CPBL(?`;=_N=K6A^`Af;4tMKEvl03M{kTgLM)|XBHdJLR51xPNV(}t^D|I< z!_NjiFA}!>;hxgsosY~v%vabQPh`Q0kV~aIi~T;Kh52<3VEEX>%)K1)R%`!{{V3jwrs_970#WJ02+@(vhb7- z8;ZhexS@)t4?4JYzOOX*c=)M&i;_d6*$U4OiIR@gg4;wFy!D8oZtbsxKBE>(ktfZf zLt55V_>VrJ0pgX!g0AY^JcTDNOc@eY};A~U2&^vMd@B?WD5m2W)EXhApAqTlzV zYX+ATtG-{_mYl5lTZ1kL{3%qZr4 z(O38?>D@;WcMa1ix*8(81zvB6do=5E)1@!Kz@T43Fu+SWAlf*nd;PN?KO)Hz7tRq%gNM?{4)yBezE5O7;9 zASt|yZ&-vAFklo~&*l+S4Ip{Lm}dgl&|XhWGU!-B_0>Kjqe!|R#!EV1nZjRZ@CtpN z<@=U`vGTrPh$cr#3D4eB5Ta&-Be||%XdC(Er2`1X@sNI{F_X{me%Kp`ZIt#uh&6a{ zJnO8vJ85Gcl*yR^(BIS`d(Cg?VJcMUO8KaR{a*Aa(>SBls~^xB<4kGAK`(>KUS}!P z*Jy)y0-X1X*-OZPWhK80|(DO1sp&T(aiYXF7zhA)IQPhOqUe*4UM(N>zTwT)L0Y~Uh-IKcioMa9Ctg2X25-5}~1$6_(e zXY4F64z|nyzHI!(@}^X#-ohU#E7Z;1VEZ2AobVfuk^97o;%zKe(jbU8o0!ve{{SN| 
zL0^H39h;;NIDXm39-@bIXiu1x3A6}w`~;Pt>h50_b&KBuVhgUijTI)b9!B3rUB%#v z!CrKs$RbjLyoZw^p5$*y1MGZ=30KWC?u7O--MVJ1^a>AT$p_#=-Ywz) zdGb#}$3}8dxO0Q7cHx*xt`>1Xu$R8IEjLRb17tX*VkW}bSX+zkH zVV970t^J9ZGeP!KZqm0Vhw-(Bbo)azAZ=cHoqXpi=?N@8_78{oDEzlRb zmIYYGuL@O%SI6!c&)<69brME+WR@21=t}}0evimw(}6uD$i+F z)zCG~Vj!e;C*cp&2Z9IOm+4$6Bx&JY6Jy#?!wBSXoueKax?KgE#uu7^DwG%vO2ibA zt7Exbd`A^f=vFf+^M``p%*A3a)LYGGQ8mV5t-(_fs+WvK#^U$-ir?wXr}lLf%x-rY z@v`N#TQvUw1t(jL4UBmG#9b9p&1Ncd6|6Iy?xwz69pl91hlS9VabCx9V!#p&wb||7 z9zwPaVxX4hia2u}%pzMZgN^ma;v1MLL3fbH_tbW7(jewa-gd7 zQs5L;53nK~0<9wTZ4c8Z0{}NW8^Q6NvbxygrS8=~bEEWJ`$hSJibBwClA3#$i-ieg zO{cRty}6Z`EA0bgODI#)Im!gEcGdjC*No7@^e88bl? zKRCeaD;w)CqaG0=YVuf~LQvIr*^h8NA>wy1dlo0KmRi@{OE=~|4^TbYUr?c+Eokdb z;q+Z-y|wG%NCa7Iq9z&#DTF=gC>t~gZJ>s?SscU|C2gK5L$D!GGGGC^NMpG~qj2Hc0*9QHO#0e72+1r%rpows?vAF>uNO!tez&IT3Ub70mX zlN$!gEC)Gr;S)qq#V<{F_45bjQqqpq>_UJHI|sL45IYLo>Y?qry*cqHDYABWetpX_ zg?Js_Z86&8!)coc@3*@qE_^_@XIJgq;Wrd-Jt2kLPQ%0~qj$b0x;vwG#d|(whYeOI zPetaBZiX7Ktaj*M1M`=eTIG!gG&{IR1&U6p9Yy}zZfrw|8gq5@V|+psvc(PRp#+4w=uTy7vg_c zxw`88MIEDY_oh%42MJRvf?nfKGR$0UmwJK|ZspS(aAs^=jfmnIXdK!DK)SfPnAE(8 za^(TecRc*7`ae+}Jbz9kP7JD`^iY{{MFCx^Ls*1CReAvO?1(gBVq>@`?l03t;vT<9 zXMb zz^{lDF1P^rBHwL8gA3_Ex1PdA99eWeTPb|cIQpmOukwN^`%W8_Zym8X3}RBB$7ue@ zT-{umJSTWCCHlbm+5Yn-AQqzSr7r;XjtPS40mTOD_F;Jgd2cN$IUu3(&I5&A}#QF}tw8!wt|goCo@D0M(7zt31o{b}Ay)s~3#%s5I)D zZ>nP0zL|Gdo!MGA_@XOV^A`7sj$(krJmxKKV~j;z z#c>nXZEx!_gjaNAuPe+^Zq9@_NbusCsMG``8}sgqZf`OM-^=`S4xZQd6T$5ge&Y%l z_bK3ep)u|66}e}qTtl7f$h7YDKuE86$R+nid@O0V~bnZKAB;8FjCJYA56rO&J|8ptn_g%@@XvF1{HiLm-;{@KJhXX zs1KS^P$0C9bp7IENi_obTE9}^*I7==r}GbK4VTa<_<=o2bFs79N?t$_;Ais20j>eG zdFSFJ1j+$>y!^{PNV27R)qfDi!+}E2Twg1MSCmsI&I>_)2}EhL4&y__x)aE(dEdhk zU|GNp+Y|S}Wjytmz0IUwJEc6LS{kK)1 z@KI4+DplRz45)C@%6hu0fh_^SMXPzo;eh~4q_Jh0exa5f#*b?&QVIy}=*<*j;;*d> zC&2#o%mF~*E`g=LQlt@{b&4Omg)2>|7Tn5qwjH2V&Xg0}{h5@DoKi2*vM`eCp%au9 zNs+UzKX8m7UYRjvpvW;y)v-+o|TuZcE^aJ%g!Nj9W>QI?%x%q(#7j!f;n$vBL~R#mn#W9<#=xhcc1RiFoJ40M25!`f~_}>!XUT)~*P( zcMJ6cmGJ)nhzxvR=|57z6 
z1DE|eM&~ED`X$=_=V@-w`}CQrkMGhbYySNwSNnaZ^ncV}SHJPch<|>Od_0fv!m5XN z`id6MU&Rn){@=@Kn*RVoc0Qrdqef4KqC?IVe+d3(QsiPjZ{jwz>VFy+)8wK0o0DjN zO~USJus>#eAI*#*!jF+7D*#Hg#;D0uJ!g+unS4ZuR z%UcRt;KrKyF~mEbZg5cZ>S6oS17m6jS1*_XR9UEEHJ|G-`py8CwFYsUZ%zkMf{gDL zY@%gi=;jzaj^Z2JrO?|_lvmae6a^6MTX0Sb1$6Ve7sAKJ1mMNDAIx-QEZ}ti08yyL z*}>+ZmeJtB7VS0Z1ty)LMWj8FiAn&aI`vQpOPZ5Ts9yT|mf{gRbt6D&NSepVqWkmc z6^Mc_aqP_s#VWYfQXR#p`g|N(7SM8$_B%{&(63jH?kyPbtx*gV;IjMsffj-qnq*K@ zW1QirkE<=zjWH&41k+17P-<2!tyLoPR!G@e(?cpU9qIX(R9Vc8e{Ofg#rO!xl)`{XSn)L~&+ z{*c)gcin#pE##g-4`BSt#ogtj>`Y%9K7Mkp3E{88%OjSg9*g^f+_WC~54diG3BNWz zqLQLo{Pf?6&_ploKY|_QKr7^ZAfXOP4*{uu7*(%Gy7WbXdsSD+BY_Gxd!2x{mSfT{ z0d;ab2?jn|_5}mXRL{Bu?ic!t{kX5p3c=qetwx1*DPB8ugU6e+M?e>Lup>}2LVTEU zD+k!8=)&%AC?Ch>b#6*2;mhhbR0&@9$|m`auJNQPqB@3Q(2Qv!s$U#s8w6iq%NB*O zL3@=<^*xSSmVl}>CR9BDzDbY}35;}%2tXkCuC0H#hjfvLiEt4P;ORDz(#@;pvzQ!} zZW3~-(>sesww~+Vyv@kK18qK>#Ws9cuA!d2VQQj+6$J-8^KplU&o5tC^A6Z^(k^EN zRxS_?;?CE6MNPGhv7INqxxmx|0-~|vTQoe^xTF?mERl2~FG@n<;)&A60=mguJ8}%M zH0(1AX#lN*hE!s&v_>KIRt13>d<%+-8r7EDb(}&ft^oMXJi38NE&`wmGtMRKW-`*RRafvw?qS?Vg-=9wo|<^=*_}%u@eTklvwDp#>%&R^z$ssMY0CrK_5<~jiEtL5p2jB^9ALs z1huo0s^S-R+|8EJlNzgNg$|onOcN_eHDjM-zwHguiEp%;%9n2jSdmql2D*344a-X} zEF*!<5~rMQJB4cX_>7|fpv&j|QLt1Z8$*5?`CyrZyMRUvWB% z!M6+ckqZ1vV>3ggJgfaB+;xd-91drZQ;o%VW;(+S+-(&VabNyUx!o`fQG#1s0F^4E z{sH|$f@^_o#{>5aBEfUlHDck{hp78cq*u0YKkO>}Gqn+rI~G0{+#J#NwDL03EyMo+ z5mgCfWBESjRSZfRM5?|KiI!DIYzL(-rPXcjN)dNBXI@&*w8=G%)aAR9X04^2N((7iSy`<9{-dzwFS+NL9A*?jh!rN<-WpW2%R#%=Q&2lVE5 z%uM}cnV!(TTo`Y!976igDG)X&yjBH)5+WvyXR*jZtsar^3QtUDOy3*Cdx5!Vrj55( z+uBxc4s9%^oa&HnIfc+(!0zJU7SDVs$?)V=B68D8!b>BXq)Gf#Gh`33m zFdc(G)j}9RLANG7#)x1Tj;1=tQ69A}hz0Vo#$x9hnM^U?d2L1i0F#nH@ay4E9KnqM zT2a0;17GMZ-aphe=!~z1X6V|xprxGS^B5rnUo57c=4S{!Au=BmOF~qSw?44@nNfs6 zw_8)a;&3`_opq>I9XFQl54>hYU5c%KVAf!vJh&e+y@A!Q;y&g?HHlYszlZ}3gZokm z;lnZ8XR(ziuPuHbnwLn*fJxCc(HkPN&&MhP3o7oLz2i{wZUzB*vWB=|eD^c!Ycz31 z-eVR;xqSt=emIpPs0y5mQ$ypd zuZr0t31?Rjm=-jDhs}_%RJaMHu8QUu{lgOu8z0?NprfM%Vn`{YAH*{-Jp{er{lJ@p 
zM`V9B9bz{fO#1%-4aHp37dyxJz6+MjR;AY8`7o3Y1dL!G3~HQbfP6-b#&G=Q_0JYl zs4lhcysXW3-N#x)YN$+lvNao0IGL!7<|UJ33%l9(Ov~xUr{W%Dvj{C;%TElq%LX2f znhGmem*U1KVws_I(fE`Uw@RK(k>YOv{ltCG1LhC}v;0FyKXB`5r9zrFD9y$nld_W>P`{U3Y1My0PXs0nHRL>mhk+Uc9|S%sOQNEgTda zb%%oqS;XDv`dpl(As9XiUC`o7-me-ZV)AKHbq|Q+bcCo zTE%x{&eRaFRSxaW8zI38zK*`;5`%3+{S5csSXBj`#_9Y(X)8rET(}={wy7Mwcsf(^ z!wfK>0maXwEB62taZphX0MK$S;P8Zo5z{Ssf5#fVK?b4H(bAH^J#_q%;@_v?2l-@{ zafq$rS=5?t4m!uQake_l+y4M1Z+Iex`mWY1=A|E+?_ls0dWIQ@RZ0`kqpGt5F`Oc4 z&BG8c#A5+X#rspqvLRTfOwf2Qw6mi}dwN5qs$^E&33*v{kHZF;L|UEP#WDkQX~m*b z>byle1iLEg6d#yn)Wxhjm$tc!ilb!!R_zpPm^u1R0~w`%_6GX}llhBW?_xhVU=Ear zuU3%~+*`bX^-M}2k$l<&k4X*aOfgo})ihQ*LYv2*%qjwvswf_=`+!WWil>wGgCeX0 zpi}|0w~@3)?Yc3?xN?D_T`PylSm_p(bPVy!{!a95FPulE3aVfa-{CA(=Jawao7OtV zix>)cjuvMNP2GK5ptYl1Yw2~~VdN&(SEmcEvd2$YL3tQB7fs^g+5i~CE>3lo(jH|f zqfiekelrZ90U1t%r+2I3_N)ekwVU9u6VU$%eJ9aJr%ZDzf46qpGD!&&7*oBxw zjlEcF71opuW_o=vRX5j^@ek9aC`wxRuX8^H1hr`9*4NYu6s8V!I_UQS8EzF;*!{7e zB9#U`Ol^a8rK6tf-TH{-MYxP{VZDfXg}I!7wkiwkboVM&z-WM4^H)tf%0cekWsdAY zI+1QK2v8M(;49W|Qr4aXs+8sYR52|Vd4eFtS-vfd)vr886P}*%OUO}O$Nj*hEFO*L z#J)LHaTS@}ve|9jMa)y3MZ#|40CLKzqTIg!0P<9Z8=)xu$IR?Fcn-#o5RViq3@g8nNB?U?aK(T+QSx@dKm&Q7baWE_EH$z(IKA{^`Ij%X*Elwp0da9sz(Z6vk z3a#B1IdRf1=Jm;pTBOfrUSV?r+YcE}5VfN!oT~e2J4`wIat6K3hE`0ki)h94@U>TZ z_Qv6WH`m$06FtiXvA;0FAaDd#hpk6jn_<}Hc=5IaHL7e;rvl%MtW>U0JFs+=hw3fu z_~L`7`#c*=RW6jafUcD+-oPouwb)Jb@hZF{8C?g1h=D?x#-ReuB&Y~3)r-hCjdZz0 zqH#>uh(j33q1{C8{{WF?lC8y~G1{{nE?IJ!{KlHhR^$C&{z(s}cecZ$Y$ysZh!XH| zVmwbcd56}eWqO4E5jC+kRU`w6SC}I$La2=OK(><*ki0ww1L6_WY9rYx2GDvQmi@r9 zwQYw%o8CLZV3(KC7EXT&OJ5XlR$doTrxgS!TA|0NN@ z5z9iCR?0bAH8+c=AB>&m%|yORfZiQdv@=-ZVe>hJGlb`uUXY=VBARu&?w8}*EK{N1 zU>&Y+ndDeduun6TD-hgFkX&9Be&NYj0HsfH10^z=>{n4wh}hajfVMt$9)zI1ShEjB zMo?STGb*Z2rUQ`BR)>mK9o+Oel~El_neEBl>$uI3HB~P~qv-1e*@6X+U+LNd8!8%I z_p2E7!~;YThafzDW30e5uIj&9BH?)$1IRc?hCL9{kyh7tgYrNUg}PPyVHsH80h3;$ zz)q?oPRrs|9a7inma_+zE6`_+#!>6^h-CObG*{d_IOuBq%d))zPnfH5!!SSOx&GY$ 
z0OYz5=iUVnErCBUAwlGp*eVfPA?Dze9x=IKrNM(XHvR=L4|wC~f@-#*rdgGZtYy6f=>rh=k1uhcu0+AXT4JK&eF z;~e`@9oq`JpgIPEE+|TsN8n-pAOx7LI?dT3TVQILOG7ZrSPchH%pta}$Wbny%c!UT zpru@`b@2&2PyhAQjCE+qIifD7V?#c*u)!w+wD9N#$ZAXa6HVP{EVYy*0Ulma_^&7HR z#C!U|!zn5BD10&h09JYUsf=}p@|OnOz)0U3@&5pltF{*4(n|X-buh>C3n*65D;E^I zZx5mpQyDI?Xh<0Slebi0`UpekZt2^pG68{3F%exgyPT1x!XFPa5;F!2!f}a6^m>e0 zsiz-t^QK~LdWZv4P+dj5^2FF&zWl*bCy;<%>dJNPg?s>J0|(&TK;uL3Q_ni_2rO%o z`cBMz$^cb&*!Dk^Ok4rabgVI+(`>#9;*;)N-b6sPYaK47q>&=$C64XalI~PWg?56! zSB@Uc05n8&^y)@yJ5*i6FAH#m!(i41JL)2!OAr_VyE^g^xuA(#UaQO;elJxL`-s!x z1RlX$e!1J95w2f>6*)`ZoyIH)ChPGGMDq7@CPJ7F#^jW zZ4k5-D;&(EhZiW;YcT*Up)yP+%ja*->2p3xg^P z-57z{IM2fgS1qw)!;{pkM-wH1ZW}U@PmxK(vA-m&vFB8tO_2*=+*T7|0qnRvzC=gc zwrnMv7u2w(5(_VvdaUB{8YwzjwT3S!(KE7_MK*egqzQ&H?>l?Gss>~$Z>lRVpTtmm zK(8KasK**NHg*0m=>q^051Lp%!W@?<=&4G>Gq%#oa8d#S@GO4gT_6e>AD9p}&ue#& zIQJH=L{9fJaZ$~d=S7cUn{50|#COSnL(+rp<%i7x{FN_OHUozay?^Ag?;O}56FEe= zVPPcAWrxHBb|4(APj}fr@adQs#Nevn>cD$p6kiW<<%Xehzr4;wxI2Tp^C(f^bra)k zIAEx(e5a|OzGj1%V}qp~k9d?_K}e9olw=#bf>eO2UBg3Du*Hp^h+fd3%82kc9xNEB zg%pXV{KYxeTOs)cdxy3P1PXe8yg*ytO<=8ib1Ge{iB({A%rcC*ZpPo14H4xP1qkxZ_$^%f`R8b#D9{{U`0 za#90Si zz3K{!d_aY^))wMJ>w5gm(IuZC*W4%tQ*SE4tcUcTWrR514z1nC!yPv?(sWK{#Q+X8 ziT4wPcDP_C1zDi~05H#%@x#&Y1`|4ns)B6UI2|7G)hr@XB*8BGKn9Z+_Lkx*^)^-> zf|e@j3ve8;9xAF~PU{*m-czGa0|cp?=Jm($601SMUzgzbF#iCe8AWT7J41$6a4h|g z@e3GEU8T{YoywxCFK0-(Bt-s{#t;o6&z$FD#IY1ue|k$H{GoS4@)V%x5s(B8myI!5 z`C{%%9XQ!fc$)!rExRmc7iTMQtF9d5X|mz3FrBAnSGx+HCCP9rKtAHTnsK=7xblPk z4Xq6|b*RR+)ImB`)KXy>Jc^E>bvEab{oP8T0Ft&=Pd(smJGj5f9Am{W<~)%L_<>;&LtgCuU{`-qIDN1dj5#0qG?<(lDRpoFDC&Nn=1?iSKRRNHxv-9SQV^v9n6Ut* zYd=ssnoc}3Z2`mDaKV=2Vu*6uH(ichOx1Qub&FdAk^&nli|Yy9G@eh>12cKPu&{|^ z5H&#LoBOkJbx5G!+kw?KOBB_vE7k4V2SaB7bPXOQ7#Wz|QRTleTw$uERk6_92x|=0 z4K(SEdjzrCV82my5UuIBNg#CYWx%XZN>?kxMdvw+gH&vcXrbf7jlhAG)u?;adcILn zKabqATXSp6m!qb7L|7`Ur)Nw$2>8SmS0JsnN>U-ioF^bswkL}M`fXLlA;JQeT78-l zGC;%?E|hbKDy~6{@(~!S(B@zmte=R>?Sced zRZ*mIgB8$~NbygR?Bb$p&cYe?=>m??Dvub9Wi56_@Z7mmBZd7x 
zQ+2g5nC0Grlv+h_t!SfeVOz=+_E!Tat>uP*0?sTc=9z_%MRWk3oI(}SQ5z*v+!Ph6 zCT_9rB4*evUUIQYsJXLOIyqW7Ru;;ulCDZ7C<=8O)8XQGF#R&1=MK^-$n@uMOt_e< zT^HA^1M5m;DPJs%l@(3734E-mH5~;_K+HL--E@eUT54b`90+gxK1$|G;w)X6H|P7z zK&h?Lg}l|U$4fzcwq)$WUJJ6Yb$xW1(QK<2=`c+cRt53#63Z(>SLRir0~)dX!oj*} z%q6?F-x9@v$ZizIUTcyd*%zb#04Jxf?o!IGb1KFG`+E^7J3jPV+Ylju%N`<)?O4XM z22R%o1AseA!MKaxiQ;fHK^8>f zY%f1o7ugB0Z-`sTaBG+uGJH=hMzRR3iC}SSWx5;8uLS^#qX<6un0%F*x_Pdr@q!E; zWmlevo}k8$TRALATKMd!!NW%|1k)oH6)^-fE+rsoTb4ku@_-3ly2Dr~IGn;5ix%4b zOm0?ID!ASgv(j7@4bZl)a0T!wq)q$GP81yj(;Z+R4fGrvzgG_ol$JmlWmjFLh{23b zBh+AE#0aH8uPrbnL1L<}+^J+m8dNVVoPXs10K44SjUN*h^Zp+xv%w$9KHS$d J`6t+q|JiX2ox}hD literal 91652 zcmeFYWmH^G@HRNO1b2dmA;Vw^1a}ECLxK!$2?Pi(Gq`&Q1h*svhrvCA2bVzt1b4Rp z!QGet?>)Qke%LSjVZUw9IepGORek%Zr)%m~SKs?E{jdxmgDOFk02mk;0F}od;9(A+ z0Kmb*#>U3NdF*g-aB%U6@bMlS>60e}MC7Ct6y&7jKN#FjJ9JG5^1A4{ZQ)e2g?KB`l0*08DZWEOLy8 zE&vk%fPszmi1vRs>_>6+9@OnpQv_mR&W9&#wI;v3-OF_(~5EoRFOJlS5EgLCeK$ByWlqr$}FflMOaR_j5v2n05 zu^yp5q9Dh9grFG|k3&iIOhC>Io}EzDK63a<9`+$;1(%u~5nSCddPDL8K%uS+j)T)l8y;T*YQn%TbQeYko;thI)QQ; zQuVh(7~2_jc0y#de#Gj8-{b7^cejZTNg|GRUM)pE07SfV#Z_z2$t4z2On_uIDKo6p z*3)i_PID$`aau}E0Q*P}!WV3|q02X!{OcefelBN_Wz<8Z&|NVSr=gygJIICO2sKPQ zf(I--Xu&`S@I$>7N2OR@5rzReM&?b9UcUJ0P6qI-3as?@IfTJtiFl{i{h>MuWAq5dpF3Fq^DKK5;wgbJ#J zdkX;&UFpDWkSVQmo36(VdbiIi^8L<&5WqObKE@s|o&0D#q@HV?kn7L#K90U)gO78E zk>id;+12?|T}WzBge*}o#?XAXlDCY{Fn)C%f)CKlL)oEsDzp}1dTg*3^Sw7$S1L>-9h!#JbT>jTTv6Kk55koRvs zZ&sX@FHvbf0Pxe+VqLc6>f;Ew+nClTQ1@xov#FD{MoQlucm96CHTEq3wc?7j;(|i; zx-ivr;z|#KRl0@b$0~U}{FwL#3nus|hugdc4)V)n8w<3$mH>Ky_Bp!Vqda4yx=3Rc z59kBHWJ#crmO@GXm^9sLh1&1cK?$kmv@3Ga#RFts{)chmzz8I>4b^aCC@np5*KnzA z!$?1YI?jry^o+Ytw>Fr_&eF1-Cym0&3BXVSXu^{nxF;)x1`J;U;9eM&2$|dIyH!iI zsW~gF2r~Fz>r&W=Sx)wsi)r3ybU%rwbXw>G@6iYiHy_$&u?*Q*Uh+kTM;!X6{5V04 zYkE>#W5Knb4P9@nkJ8Ty8b^(4PYeVlSOtfr9_J`TsJ- z+GbG1uFj5$@z^0|1~m8qo{T}6vO=I>N8Cu=-Ic+Xp^R7WlD{C7CzuoyYdXJJXTpa7 zqP<|2@?xhV6lxd)Me+bTDHQjY`p9t3yrrpt-tFfLp9$&E96eQ9oKsb+n?6$o(!h$YxL{TGnR87x!~HGCmIo~YGO 
zFn*CHx~ynS6Q%RvvEaszvPhiW2T?PQT#LL_AFVAym~y?g(F8UjeqAHtc+1%(@t5(! z>wU9a(u*y|o!m+;uE-hs7iH>x2mZIvErj$x4*nMtn)@~Wf{z+S)z8+$|NF<~{?`{? z7`8qYpST?DHIDngPsY5fx2F6kGI)}}@}GUN|Mj^Gwzt-gB2_ko!eL~~_luGL`f;DK z_t8wYw>B1hD*ohpW75~s|MjttGv+>uc*6SrpTHpF8rc2|$$$S>V3wM!=!tp6SxpO+ z!7tg)^kez2S8klR9C;L_$p8P)|9{s1zjT<4kiReTBg7pLyoSj)Vf~|YxLjmd<&~(z zJPr;!I;Zf>gJ4|##DpSVz8bFOWzg-&jUu&iv1k)}eYeoJVb`zZB4QBje9VvmY+78o zRiVYffDiKzfTtEOQ5FL26sOk{{axsXLQH?rQmFe>hN^i zo%&13hY~%AcB1>N*0mn;bUP4buup;vr|zmhV0&U+f@U-g*AIr z()B6)i37)b)lkl4rt?10lDY@L$Dwdp(6Uj?1KZm6 zhzxeGW$twILSjgdyP3b@vmAX}egKqG?~GjvpDO$|bo_?)8(f-$Ok9ed zD(Q9n&>3d-pbc=~;q$Uk4T&P)KMdH$u~7Xa1~Cng)GJK8WxNKdb&SAeZWWiBjboQj zMwDLi>M{Gakoz3KI<~$w3or-td>g)Qm;3sJPQd)~w(8w?z2&nJB{g1JOkaO;pYzGq z8_84s(g#2YpV_--s|S0?u;EwN+Rym6`Hya^l#%Tq=|mQpUCMWf0f!!cJF$X!7l6?Z zfDS&Gaw}d;v`=a?R&S8Au5^{^lMK((dV@#Lh`pB6KdqN+elBu4_AcoC^3llD%aXq= z_Zb0}aG8){!nFNbVW z;%XX;xWj2Z$mO~8(rIwngg>dbF}H)97jxy(LYdB`luaq75b@mS3}4>6$N7t-_-I_V zHBas7vIU(xMlj)PydhHU(b=_yZ3Ts6zOf21e5@H;kR zgyzrtDnWucmwO?GM^<$SA3>(zbBOmn034cSyit0~=lu5u+)PW$BNy9d2&`>qcq}A3 z_ADancS!Vx(%gN#kof!M%j^s$t2=I|e~1Q__OpL!cJ9r!!w-OM)hBTkZ-Uh?#~mIo zD3$Z&K+?$!(o!F6Eh^gYVJa(=@#^LC*$;pil%A{}nec>jl0>WLi-XaQb%in3dp!p9 zX)LA!lg>|g^jAi>9u$<wdtwQj6W|H`B& z-*E*vh`z|?wV)2->&@Qa2td?VWT#sb;OHis|ANYt30BsB9b3LPVmK|jLwWnM44C^T z%hWcC{n>1Q%Dpy1pf?9tN(tW{K`Oqa5!5K%>*d!4oo%MjO4JCj7Z5|utTHH;g~m#q zwhNXt^UJhKy;Dr`*GCzWnM!%s%pgnHrqR*jR)lm^mGv7vvH6*P=Kra_y#hM&8(}Y- zuBOn^9_2m`R(>j->USE>tW73b_o&F+sPzD4%Vp z+7;dTXvbPOg@4?|q-Wx))M=E}_qjPE4kJ?x)hChFl8PS_^Dg&Hv|^v?8@GGKsqod} zi?SBl+i5+2Io)e_{o%V6;~QV{R(k0Z6=N~RN4pxhRDTy*@?|UZ$=Aw{{yOC3W;}Fx zlQO<3{^5$R)3y9h1k3Wa)LOCLjOO`yJNqQq4E`7etnRoT&Cijr zAyl?ze(eYjlRaktrL|G;j6y&njEB#utu?2@&2ip~#Hz$>V%g#_D~*(;)MvK^7Mffd zn#~SOtmWc$Z&5|OW9yA)N;+58-BzaSl`3WGk)7VFd|Z_3HPbj_S=QyK_OK{s6Gj`(SZP ze=Pg7$^;h#%uL;IUOqi|00^l)0De}S>-3XM156RUrE$*j`^d0oys zZC^95@&oR^2UM5)Q;4t&rS}fK<>7-p05BH=Hordr&Moiz+p8DxB;Gs#Zp0q|mL}%I zt-my5PHG;t)D0a=Q3da6-#Vafk!4{3DT_#_7Xk@#Zk0-1)P%K-it&GEb&R4KkF_>J 
zH>~de787?{c0lIS9|5Wpy6X5jDbk_SlSX1C*xLia%XBWl|6QzF+D))3z@#90 zw5p-L`{*N&dBT9a?ib??4h67AOb-6hhJmnMJu8Fuzl`_IQ#V}O@)Tn>4x%e60EY#A zBO9#U3j zVBFIRe*l#H4|li=Z6{^wnOyqDC9bCgBOlB9H1aBgXE4+R^MVn=eg9Lu&konrGh~tA|WeAKPS31ez95#-sl%qthrR-y67RL#1ibe0bwkY?51TiO+ zl}S!Gu2u$zCmFig%ckINEmBDOGHa5Vo4(!AgfUg1GBEd=CG-J5W8Ej>rUj@@Si zl}uZ7YF2lIuOE5en0zh9%KT(uoI+$Fc5;H-u;;9T26rtT5T<)2DX~*j;cv*w6yppU$dE$Md(8LsWaJx6S8vbZOT?Ko zIwhmm#tyz*<0+>2coCX&fc3q5Cy775%fC)CI=|-$<0bjSKX-%qN>+ckvoS5mPXn*# z$;ZTRB8h45r;ue!v#JbcMs=Bma>qDUMjPuMsofF`BEc1=){ zz1B|m?#%1j&zpV8-!IxX^Z-x`{Q33irTgUvyxQ4{+nr7G_kagLa@k{kzd7*$*wRre zk9XTK_H}bde%m2%^?tq$q!1?YVsU#>dm3IuO4d z&ZbCEjH;iiBoL9Xk(R-$cmRl_c;5AFuGud2P_|yx=+1jXC*sQ0JC=Vo8lvq+QWE6a z>6t#F{k3c7O0Qqn_q;B2f1>rTq^7%3_^91g1LQkETiDE!EHGK>spyQj_rqu^2683RIyX%5>S6 z4Lr?R7kM@fxxG*d_-V)z#(zuv^Zw7@$9!o*_u>t$(t;t=*m(zv6DbRI`5+dXXy}W0 zJ9Ld;p4$@|zHP)D=l*5&lV)7y0dP_tdmoM9`UR)_EBtfgFcSR$P%2SsG)rhPv;|9R zO+5g<8i|dbw8b^^O|u@{lj${Gieq-d=l+0bb{)4^jvv$0W4rJE7imvkv$wkSKKimY zK}O1gXW(QqUxL`P<;M(Jt>CHf*-h>k+zIz-_Sb&2w(%N!n57IimgLWYmcahJcL=zp z<-3yL07D!ELTB0P7T}jSCwLC;j2iox^=pm?zNXRyId!*CA^)=i0YmGleig*s8qJ7)hBd-uQ#;=o!zKqY(8qR8Tnr=Lwsj<5$T$VM zHJjUKALig;MOUzbx$y9u{oakJ&;}JEL#~LN4uApZTYDVh3c%$|3$tnV&$}epOOg2x z$-7z7LKXCkvrhLrHZKBr9Aquoot?!dI~k2aDvwH8GkDu(8$l9rsaPCXcnnU4*crUL z2sQo=7lTKF{V)b0ZTqt*efqxHCJGT&tkm@_>p)p5JdU7w7h|=xl!*}qEW9bhK#U$v zq5^d2)|7mMqn*VuZsn#mG5)_j$j9|hlkV;4mK3172g7Qjx{^8NFg8;mNt9_&r0%qu zn9m^FXGGeE6cm4y22;3HNwM5Mbq*fi&fmQk|DaR>Knpx}@%_H4L#_ zSYRM0WBf{pT5VsRyRkf|xpY7BEY(3;f2K zwVrZoK9Tw(eZtX`G~+N{uz{UGN_NB^>adikCkdKOrZ3V=Qz)$$2+Lb1mvtqeAL7?o z{DyzYH59ZYWGmg1By*&_KqnEB8sz$`9P}@W
&JcIgf7VCzn*g^J4$}pe4Sb4D$ zWgNF+p1X!}zXsl{A_n5Gd#9n{Q};bM(=Wo-1$7^b6Z(Vv1;Iad>P90`%-Ns;zsCZ+ z7|>p%$4i5Wo{bNOscqq1|4CQKr9;D9OO51bcKEq&XNE`CtIm)*sa<=Afq`UZ1`_=&qDvM+ETEOM05^7= zT>GHKa&)zD7SoW_DK=KVmx)d_J2vs?TE5xnv?liuSKoHI;a3;(o)rQI24*8_ zmd7B>xEqQ|TRC*u4JeCmku8!@K8z}}vT`@l_ekJm)4MR0wk7YZ0GytFw+oda(g7p&^Ti5T4$((p4I0Mj-BYHEiTam) z+E{e}XFnV3_}LYZWK*FPHIoF`yuo?^q(!=E^hw=}$60c*y5z=!{M`!s?VL2T1o&Ia zL+TH+0CIRs`t0UIfjxI|R5zY-{_FtFFX3zs!kr8ax*f8-{owdPPhFB}kRW!v@iBko6hrzB$YNq+W_Zr& z<$RD(p!lY49Mx3t=e~Xt5=IDElwB(MgEmz%5R6R~ALpxxk<7?!a8k;2?4?Vi=1v`) z8&ZQ2c0GvD?TYc4?3WfARMq95=Om`_m}@MN=`e<$zU3DMMQUM5Gv%)7ek5RsEDIn| zOljotje6XM^Do6BW%<=7$HsgQD=K|yKDVy}}<@IG!mw67`LOEzPa&i(CO)TrB>$OeX%S=!Pus!6-VN)o0j*GiuCskj5f z(1rrXdL22*R)Ekg>7CC55d}-m=uRHV=+x-BIrg^QDdT}d^1PUOub}% zB4TVA&WiIdOH#7OW7wToHBAAL4qt-G!!&!6;=_o%Zn&}=J9rw$f50^isc2I!R@md2 zjqe+O{9RC|I1X~G4`GHWy(Ga3bgKK8t1HoERuG=@$j`b*Y{44u6!7zB)d*l1C#JTV z8bYe&8oK`@-AQ$~E=&n*z#akpg!u~{1}&qQwl!Zs1oD<1fgODDJQPJva-}vr<526m_;$ijX+f;=)5qF=n2+W>RHsL_ z!+jsrJ1%^OOu11M$h2vQswZbwyQ{>r;5j)a-0vY#ltWwfnvSL{Xz_^?32QQa)d^v< z2TP?Nr)>pTC98h4VKV2UH@gXbXBq{rFIh1hTS_Rq3p{|jE|#W0eOa^k;aYO@SvP0Oi66GeWmYIf1t!E! zZy<8g3s9h!Vw_WUek=$aeAU<%rz>A%b|!k2sp5bH?rlU<$aI z_(QP~l&1B(%+Sbj+~{*dqCeS6Xqy{vwj&KQAqh2brKPYTjp(>rnulo1VY}tb!FHRN z7`#oMVY%KL`%L_BjSs{gA%;x`Ljr^G*&Dhr0SNY$PoVw%iQzqN=9yKFW6Iqjk? 
zzzHxyL{4e0JyY9~U_(cMtH7bhDcu8LQ3Xje2Q530dj4kHgMJu08)gw2<_r>*#m8V1 z1OAh^7zDzVFBEyOViz@k#-1f`625vzdOyBD-Q%5d_v3kjb1WA0>@j0(W~iN`VUW?t zyJVMekj1Afk5E~e+be?+ND%JC`HCU31i^hkA3HW_oe|Dn266+arwVA6lf;vL{jCdJ zY<@GzIhYs$$9~Mfvd9}nrgbA&R2iGFpUcLQO{t(jkRWnaL*P{nM4{4K~a-^AV-}js|+#S74RTn*o$Qa$GjN~x@Oh# zf^2N4>E*?S;?B=P^5-prmHhx~wse-}4C`9yr5_ECGpzG>KVmY9ZmGFPZ*lrt0yaap zQa#G9e|8Qq7w@FB0i(Q25J9caZ~*bvfURU_L$#YISe;fCooL>GIyGR)17L`Zxku_S zXte<-hna;qnxhOkGI4d%Z;N@E%7rYIgHVpEzwq{?#&NIY9GqI@jg&++AOn$ybC0hw zoO_Ilk(5p8u7%ekDV&AR{rkJnja!KLj?jNemxLqArq)E5E_V(kbKs7+mR-2>9NvTCpoOEN7^^;Q{$ahCqTxvJ?r#BN49diVv!Z`&P zn@OQ1PU&58(}bA(iSzuCNq@_(!23P{zucWGvDV>A zn6Www2Ksd*9bxPY+H}aFd0d!_|Axk_EXWGkS*D;Y7lzmp=~d`N7mP^t#B<4_0BHzn z#A(KXRu0lBcuy@W; zmqfsTg~CpDZtTm^y*EehJF7$ME@vlqu92ckwixD~(-T$p_`}!KRd3&V^_eEthUP$6Js@{ zn-P6}(sCQ*423@teow_VwWq}VsDN^C`dy6=ar>c=%01DZ^A2;|JpJCgNWbpR>zzX- zks+TFdpCB8Ns%44ZhNS&r4sYST!B0QBsftRaVZtla*`AKer-fews2p~E{7n~l|o(lcYpR`VEyMOgP9$K0(~ppU z6-Pg;wvsLq;t)75w==UlVV`4Q~yf2vP2 z7it$`43GHt55;MwB2CD&WVZi2iE*D_$Ffg%fFo?dA-DXC;{eO0s!vX_-MuFtnArhyaKj=P-9%#n>=<`?0u>uG<#AP|NKWmmMcuZac%JDGK$_ zlnlejW~IQHM(*xql_B4s zidz~+A0O)j2C;3KT?&!*8+Da&qLoUbwd?$;ADdgBy#W?;Ty5~ajia2%bB`f`FpyU< zn5TOX{7o4v-d;+-QHu!5WJQ~Uj zGU0U`DXQLNri@0p4h_mnREBvQY}cWxk4W$YSlF1wfRUq}<;BRzBJTD+(2!JDeRua) z8DrhhuJ>xVMqEP93WN?|ai^W!*_E>BmxJQO5xy1(V|kgaM_&~p_IdPKnOQ;&HHxaG zeR^-0Za7-Pu7vr=_q;bH3dA@a1Y8bjX4cbGg&yR;nO8Z-<3AJ`raDLzG&}7-lVY;< ze4!pQahD0PWjXw^gVC?v3+uV+A@TLak=ltC-dhn77F<_xs&W51^%HJ6IhsPDL6-jx zLeGf>bUtWhwJmEJFe&r1Y=ox9k>F~@U!utC(4+bV4otv5C+9(X1Sh7&psd)0Tb9Hv$l{o>~R)=KKRFGy2&}D zd@{CyKobg_2_S$q=~?D*w;)ai4+cB=Cee0mmKUT?Dc+UN^&gN~^(567J{cH9nuL4M zi>|A`kSR}Xq$ap*3X4YPLGG_y+ebduYd_X>v5n&x{7}@$-$2%a&zK2f`|3B_4}ffY z-*;W;4GQ?RWdM3F>p6)FI!tIs*eKp2nOrONQv(K1?QS_0u(XgYSPriyIHM1bP;@b! 
zI>R@%JlA_oEggc%qj9U7)VXUwkxTmnraL;TOM$-^*aFPh0BR7=^5JEvWS~17hL;^s zyqgx{$BfR@8&Gag*l$y+kY3=$=#)q6fJ8t<&;8N$+dR~56Hf%G>LuGRv@N$yEt;2O zTv%Hn22VlrIO{Y`FWbg>~cgU?*rZNgxgr8p4R(^*a|3&eF}XHkI0tX=7u zW8uR3u5PinU_34!eCIcQinbQuzE5gMGl`@pHu!9wn@b zTxbv4i0t%vAvDny1As?j1;c~+4G+_fg+>5JOtqyvWCEm9vd_TFsjKkcAqU9c=AYvF zdy|sQH-Ju?uTpPR=JO(lWVrk^;pj zBnJV5zG~02g6^$d&Qk3)S)(u-D{WS91#Xl9{yca2sr87e=(;SLIqH;2QIBqkna`KX zbZFCq{rqnjVj3GYVl&z}D<~4n8Wwj>0zv@0ZGU1cn~7}iJn`-mdopWmwJqP(gvL05 zSE?*@*jQ^S3#(jhc+UeB+~m`0AvUNm@LMumc;<}8)qZWMl4-&yb>kH+^=Z|-B%Viw z;9ijt&4J3GUq(lZ+mgP{&vJtMJZ1iC++DEQU!- zv_JRm2L;PI4d*OHgLJ0tp9J8zlTrfGT8b@7ODL2&qhFb={z~~bp~KAMC1XrW73l% zw@MI?J-SuL!l81|@vo9Pu1dN2_x)}R?$Oj_{zQ^7pLDS=TBLXiBh+m>o*I=&$wU-* z6!ik0Fx?#apl3-td!hx;ZiQ@7>QP@U&PZRASI-(R!;;52!HN!f*umun8tn9Kp8~1l zJbDY7r3%wf%}g`=Q_dd=zZ@CBN6Eeh4jB9G*l;l_0-`U5B?EgC)4(9}U`e!_s9bUy z+gccD+|RWK0L#%LiyLnSQ~$Z)D3NLt3B_#M7izPjCzhe$iy+M_sHWj1OjKSwis?v` z!v(A{Z;#-Yg|4csv60x9I%FKB!m9-fgm|!t9IyT6<#U9uiqzaU zX$-ux)RNwI-JVj%Z}V$9-vdc7nGKE?FiR%b0qfouS!69<|1g9Qr^0Pk3WD2oFI3zN zHY(Y$?)S7{K`ZE(w$v6B7N&3nN|-XZaHWG3uE{0i0J!VL1Tg5fnM$4|_f<8|Wd;n|>wbrp zNb~wbH`fe&NGXKyV848$Xd2z%=L&%T_IUuT4OPWjKLDcs(CPYl zo|N$$&=0+8O*f7Hd7d^ny{$Onzm0JnXptfp}=?|7JGA5`23e~c)6omGL!pZ zms=IMH8eXRp-tk+E#{&}X< zP+k>fk9~ozE9V>GsZ=|uo0LhXG${~3>0=#Ia+cLZ3}*pw#smq;*BCm!MxXM>fBRA% zR&=C2S$P9_Av6i<%8kWyR1RJD*lT`^dH}q6`sJ<4B}i-5g7)b`n-OpGX3p%<@AU)% z#>3R4Aw5MuaCSFDOS3$Xbcy$#OWxU?8=I)W-k)E(m@$q4Clgu5^VEIDqT!oE2CzC| zJ|#B5yu<*!sYY2fv^;*HX7{QzFIvQ{W}Zhp42SRNnhp9Vv`h>X%}N#L3;`S@71)m{ z_Q*h_XwmN5%pZh=(=DaUW8q}G4nfB$Bx;g3J^+Epl=+H_XvO?Jn(*8wI+ORo?}$ZJ zc&x ztdkelJ9J1-Zz3p$nGw;mctH4GcrV|zlWgzA(V6!xj~o1H+_BLr!9LDwi@Tc49BaR% zG(SCDosoIzp|_rrQ{}#N2i0R;VK|`Pb#0gBLc;?Ff?NMdd2h?3MVyceF?Lr4ogGdPgL3v>aiI0`_m8I7@)gR^386<}XAu~RHuU)Iy0?-? 
zEyOm3yw^j&$G-Zc_rFKZR$aDzkBQH ztIKSIZk?JEa)M24tLMHIavrH85KfmOBp#(CjHh?bMJ8;^uJV=yx(5gLEH5ud7Vzly zj+o&kcO|KSdnnZ8*4@`dmZo4sVM4xCyEG1JMpNXw#WZ2hx%xjyu4>Z{X9VM)^(j!DD5mD%KQ9SV6UvowZ%$dZHJ(4O;GvgG7C}4B;`9 z-F#vb$-uZ@00=q|{X)Osik9<<@hc{~&;6v@m=TQZSaet!GN*$cmrCrBA0^7l3cj)8 zQ3W$4WQm2QxA#e%_VVdY&N{+O5DbZ$Jv$k$2_4D7%DjPn=aK8Bsrm-5y^cS+MfyBe zbYC-bFj^4V1JH9C{@>iN>Fi$Xuhgi)IncTQsgOztI^zs^lgHqf^2{Mcn?Bafmy>lJ zQ*O`mgmGxXgbubZkSx5pn&D-|A$2)CRP?s&b&nd~cClW&*;d+zQK%x@My7rjG&_I@? z>B>=wlQE~OH>yETKf0_1kqZCr0y-lp7*@f|7{=Xd|E?qYz`IsPN!oe91RyoC64x_G zgHc-IsjV5RAZ%dVf^|Lc0YE~%lF_>~mvJ2sL7z?fNq*vQi8FhUDQg)9dswSS;h6H_ z&%z=Q^6ggpofq*KvZjzD2mFQi@7;S!1e0?Ss)S7zQ!KG%OPYQ$Ay;iK&GLv#LhkoU z4ZMWlaGto>dPX%N)kuIbaLkc6WdV*Vr8a%BUgFU9SjSF>W*IZ6`bQ(aVsvwwro?F^ zlL{vI&wfD-c%3etnP*HSUcvQ_z&&OX^hIR)kFQudBZrkzGH`%#&b4V+^mB}rUW{W{ zRXoj(8UVj=WT3JOqOGX^ymYQf3ZYT4Zl#4fJA)Pt0NXRGzWUrcnZ7QWZoU&1}y;1S)*h52QYdB z9{`FlJ@?m>h?f>tWmYnSfIPqCr?lf5+f8$=Rsk5;16q?!As+Yu)HW&ZyLJYdwK06UZ^y=Qun&v3h6U^9cn&Jd4D|@F4Yk@ z_6@J45j*X8wpug&X4ir!y&Izre=gZYzyDa$u1Y;{X{bgX_btY-*c^32dR?2)SRiqa za2AN2ZcEnL6Eo90vl3#-NyAY53I6Wf}p zHNhi$k+M(pPNzX8s;z8Mg`gKp5X=7b-)I{=qcTKPI(oHC0%SA=#1RTtVn)Q=@by7 z?)m<1O!h4-J;pAvlQF29Q>ABSxV-+?TR_kfTeGqo$MA?D|5}=K_N;~stN*O)I4#ZI zQBoF$mpQ(Y-_|%6=1Gayg!t+)lCd!H_a8?5tDd}T+wnD`FKM47tOzZb6&Thpt=MOb zQSKSZ-YGRdnO<;wnmlq)#(*e^Mu6g$7)~U#TYL*wl*ZhJ5y5vu1~<9ryO| zR}CpC0vg*!Qo6J|{4O{ku*lqOg$tCsGGSy&7KKC-fn-Ga@nW3>FOfpMu_eb_uysI~ zgegUK2ksn+mssHdEtLoXp+lX~x4cj7HkOj6fCLh;FjEVrSU<5DUZ+x=?#ef|o_NJ? 
zU@97$2+{fk_a2xSWh#crlG?1J^AFW()ilS@XR4CMklq8i-W5;-kd&*Up!)~?pYPFZ z<49~w+-~V-`7|jTG7bXljRtX}HySAtDk=-?O*QK_Lu2+L*!P#KK-2oyc^&`1clVtv13(^t$LtV8Mr% z)_4zq1>|-!8@oE2FLV5a@F}XySFy?{&C5a*^JO+FscOX><9FWtqzPiEpCy5&?n$N4NfK#vs5*> zlh1aFvZ{8UMf$P*VwoIE!2K=QsW5P4_WrV1;Ks9$3hQ+k^GiZCgp7l?z$iLr*0c+& zx=Z(m${8!^gfTz?uX%Da%*gVof^cG@G#Ll?7JS%#I{ ztg^>zpOa`gq!s0+c^~`pP9k{IhL2bffTK5)L>M$&oS#1^=^s< z^jPbXzO=p6%ld5=Ogd@Iot*XGqvAe^2IJ1{yrH(#);kA+OGGqE>DPE@*UVuf>PYx zWbZ?{JjxtB=n~m#?331oNLQHLpJtlLCY^iA%Ng|et9}L_40A0psSKUP(hj*FEn59N zqkbA>r?2&69Mk2(XZ*~tBmXQ~u?~yiD`6AC6Q}6*(DZ6`3o_4QI1j`~IQA`+ob|VT zeaaAwilW+C?Q%9JovjO@TM)r3yv5>A3D#*CpGo~Ty09wgAik5J%Ff30_sRe}9u=og zY+AtjPdV8NWRDOk1ckdsT|{kWiO2tNPmdb4_b$3)W1h@0Hl0g;#<1%p2T>*rw_j)O zgc35EjAQ+i2u{X4^OlAGu@s$mA0FPR>{U62{mGF@&ba>a-mSJMtm6G>DzuaNvXI|E z0F89xQXL@FRHwL+0a|#Z--72<2O5mO*D0m)s3{^$-MP;TQt~7}1sV1yRHaNz1|=nw zQpkZADqRP&`34s|tc|3(f-c>zu;!;<9Ixez+^f7fD#> z_%Sm^s}~qK8QSPafFw1RjGT&ot|k{a>bKJg>D#tMph}BkL8?3KxV}2)S-r3fqQZT= zr-(IWXE{CUSJT%HU02W=Mt7@=?g%u(e- zD_fe1MQue`50kAiAU1S$<2_b>L=Wh4*Mh|U{-JSy#{L>9;g7hS+F_r0=jv8e5%G8b z0RZk3D5(A3S7-Q@*MfIKy7q?yw+0(@g3E%%X#X3mk3#ybv^6w3^Z z0VW)po@R@>X^NRO@wU6IM$+H*l=aM=-$i6V~`ddVtl}OqNE+ zgm|E-C zw1b9DQmw(r$?vSSF11y^rOiz(ObL+_aZ71$kA+BE4JE9lTla;6F}_bDgIv!`Md<4) zsimUyHHodZy;-2=NW5ueTkkKw2r#o2GqQtCt;DP=t4<1%qTV=wAYkVNQ@z_F}_YxR%LUoO6Q& z=d1}F2|Oy@K;-ySw@V1M-fht#Y7KJS2js1}OhG&kA+E~8SF#e|N#Y}kM-_%OBUw1r zjqShdaa~*`dUWF~T`ZobljCgD_z;^GuF_I3miwK`#qezSvQY9Q!|bCV{NOg>IUwv# z4>8WJ3sgwf{ZFW#FQG5oA7)`;SPE+`CB65-3M~>2**rtKs2xGkwxbre-L{DLn65UY zrbWHb?=XIEKX5f^YJ1BT+ssSd&^XdAT2-fACm@B#eFP+eHYCbWuHi|?Iu$M8^ItW@ zs*-8mg-u=0Ki|}t-C^4iv+J6Nc%T9xn z#DmC#o?@l3dO_ARZb@4)qoUmUc+6=H@nt?a80X+Ys}oE?gK@OHl~_86xa*yc09!dZ zJX{CltG}=keCpnQq<+>LG5MtdCa+3S0ZCz5%NGG?F1~3zvW7AK>y~lOPvAqT}3IVpB zY$?5lH$DB{e%hE=(`C0hWh&g3rMHxltWWVtq4;;$W4eyl0B zfCsn40U+cNpUxG=RPZG<%26Y#LF)p_f*M)eZZdPXm2|&e88-#alHwU&HsN2%NWd8N z!_xrjd7EQ z1jb#D#H6K`$w|l=Kgz~DYGGzB1*Nkp9DqLj0yuI!{7pk26D${QvMSWt(lB_2LehDqKimmvTUrq;spm7Yf_J)b-s{OWVjP2%Nk(NUEm 
z%_3E}zVeeM8RJ?BaOa7xc=Jyd)nA1*J|mY4V~S+5g&8q&RH*sK;}PDb^W&Z#{jh4! z=#*UD(6MI3n%Gj_cqxY)9a+PNaf}q79(C50^_?kEtEt)Se>I;KZxUf@yjw2pb@u73 z$w>KJD(xWnC>_##jUm++xRx|qq)#lOJM(&y`=nIuTO zTi&(K8zOvVl#%}cT92_?RDW6AZ(3!GP09O&JH#d?xTx+qhMwc}SJDEMlfs}uJ;0rZ zGm#D}xZE#N45>T|OT@3NNY3g53FV%2=DhqjHy@M7`(3VDYgt20Ybf1Oze{(d z;Qs&sfPKlWngpj9I~*F_9PEH6Q zdDCQ&61;$X_ot|`gpi3!7l*W+6xaU*?&0F~60u;AF_5Gg!G9u)2V>Rf?^ZR(^khYFOQ9DdJhrEYr1z zYwRL6{BAy?C<*$33D{G0-eYiKu1mwPfw1vC!k0RSthF`V+~%IF zsWH%S+H>9G>!1=`=Dj{rq&v+=vOAJIE46~u{o3Z#Mpn&v^4amd>)H#Aev5!#20KuWO1?pUzoRRQ#tv0t3(~|B8IZ5FJl&6s94*ov6eYxoCP@8Rn z{-qM;&w0?hj78xP_z}dDgeY?(1Y{G$Fh(+Dc1?Su^%A`{>m~bC#FQ0BOUmGsR6+(2 z2N71_YytBiR2QWL+%I~ZD$0kT$A(HcvGIIyIxMM(s(;M@hB)52A;;UIuWt`|#m#PCasRuLhzD7nMfbMqJAe8Jy@gWrVF zb!&c(-M3b&QeK{IDNVjhibK2(q>u?hu%+&H$tOP2WcK!A*)4rk$#5s>iE>!COJgCbpfZL#Cwlk5$2g*+P^7F{dVNu>;>2X&H zwTBQrCKCB9aQwE9dU3u6;~wBEYlsSzqEp(~?yUEB(q)$28BxN3=TEwZ#9*IJgjFKi z@5o=yWS^R^)O+cjuo{qs2EtJ3!Q%NQfJE{OYQfxij{4?EI}wpjrIffGit9lN1BnFk zJ9iptlMUGFNpxW=`6sy09W7InZ<`uKm~7l(u=iZ3a#p^fYsLbUl#mG9cI zv|i-ui{Q3Piw@SLA*}d(+PW7XRAyqXL+g&g zC6*NGi*1wgic_2=c zJ?Yb(y*{L@PT_H7Jb?)a;vbrLcO24k4{RG^)>0OmmM$IeeQ^W1MhD$CpeJj%Ma-Gh4$K6mqTtpA4BCko3L`ZK+Rxry- zaGdu5O?lezr5Dl`4-!z2eR$vNCV)hY6(3b8Z99yPbeW^KcREs#;+t0jPZEJh0P`nd z^_l|m=*B0Z=u2253VjNatdI1IEBgrdRE&~5J$1?~bi#=q8+i$i${7P6MIj^Xt+ISJ z@$#SmMsP{V@T}!PoyIwhs~mtGo%f*k?o9w4hb)1=s)cXrG;hLthARrd!Ol1FpfQn* zch-tK5;<1O2bS3b!n8Yb`lt?19HdcLa~RlTdMgGawrQo=9lr(?{{VpXY01ge68*a) z_EYCb(k+ti?As+GAa*(uq`u^o1vFN)j(ujX`U0+hn?s6UNkp5!I;R0+k#q0|PlK0z;-9lQw_CeQX($pSLP|N`b}IFRtKA zd?kCFrIESDNj$0naC@etX;&cMw5(~(KFI6cMa_m6osSU;Dgftl5(0C`4AiQ~)!B_B zsAXySXuDn4YD2LkL`qVZUwf^8Eu~uo@jaA--OoGKL8$sZ=}t=3CikcBcO@LFjZ&MC zGvt&kf$U9n3Ca>ui?c+(SI1iTFy-CQ~DRt=W=eIIq#T;!m(7Q_Z?Hi%+O4Q}kb~B`9MiOKa+e2KxZoNaxQr zJ+bu1pe<1S9^a%F0Fj2`wFS1+>D#lmMt_vAXsxs5I+Yq#Qj*k@OIF0yn%vC~5jpmf zX?T4zvh_}!nw3W0k|bG_ zi<}j->&0;j9(fL6U?Ci73gYl89v%4|8hBiKQwvjk2!p1O5`ppuq{%yD)fqLeu+n;E z>vV}vTfH6S6eK?lJzIfFT0mIPBur_!Sx 
z=}Aj2sX*kk+LE3UtYG0loNhT18m(|TK!JRIJNkm#B%(S?EJ$(3L+A@5jF5zI5PP!W z96%CA32WBRUZvb&F?+FHT7+Y~+oIVasfck!ZIHD_aV0LG1gIV)5;NJ}of1>~DJgHW z;OyS(maIi*nrd5>{twiCfhIJGI&E%2ke3>s{H9w&;nJs)+o*6JWZ<~7jQ4Cv7^-^h zqa|G~mn)r?>uxhRN@MW>vWm#vYbxFQg*gj*;>&82BQT2=IkI~1Jr!Kpp5 zoj39V2eMi*tH9@ezcpt#RktR23~?6osL(l<{CG zLx_RSvCzU&jMkqV?!2@48tQVDN|4ZvMwfDNPCji}-_56OT8`J7Wyyv;CIf6*rnW#~ z$OwHHdI-+F!C3R!W?sbDab>8(0x%KG5!yT0t3Yspy- z?OAl~!3~IRxov$RD=0W|0|PuIL+1GBYMyD>=!XpTYH85wg2LN5N{0x+6_b(xl?9D*0_YEznq{y^c^%udi zQG~pS(bV*Je>0Cb0297zh{p?EUau6d>Ye=$KCTuJX=5QMy}CEx?xpBuAGm#duq~Gd zp9;|!!NnUPxJR~Q-M1qUe^?#C}Z>9HEJ+_`O=h4p~zXNAWQw;FhNZlav89tu9u zSHlGcuSUA^mEZ2YO_Ait$~U2ikCeW&mivs?DeYOox4N+eu-c1^2FhGk;~uTEx$YGY zyK1K$Jx#r!fHF9bHF^2fwJPIsy0o@T8y2j!p&WmVcp>J{esKjSgVqnWo1*Al6?6++ zc4XP4o^1(;eXvJx+>kwI*;ThXao%eFMrO(^stV)y(z{^!I5TfPupX4YHN%I4i%BO!SBs4I>0{C z(zLIQYXI;s2ZRxW`)O98)3Mr&Qv1_c10ACbpRm+rHcM*MLwB?VcrGJ}$JJf*I-6Iu zADJ%!S!=hoADHEGy|*#8Ryx%Om4}%sAgdlj!29XGMbcnvhE#Mu$=o*Z;t3wUW7uhR zzfQ-D^HSG#x4N)<2~k>iK3L5!m1-`$;*&1jh2-#u@sBP0s#X%Z?5+(zh>jBwyDNiF z+{SwfAu3}xI7Unt8D0>QqFYk@0>H=^&PES9xO6qiyMCFC9z+zd^48;}h=@{>0+q$a z02?Jhg?n}zRbOnkUM%-UUxZ>k32P9~(3Jzvbd$(zE1X!@2)7w()AvNoU^K`v zr>ROm=Q|;PDLgy;pssZ~Tj_Nf4h4*Cc(M&J%!eLT>RE9FsWIb}EhmJe{xdiu)IGwp zrqaz0auQaYZRM+j_%-BFSEro#&b&yR(1zovN>tjM3Mf}+ylh6>8eT|ALc!#%&ym`; zy|W44lJAk$+onWzQdaVo6dXrmnsLu@Twb;*DJC0*2|dX?NcVSrRJ2~Bmxj_;rtZ8x zTGtDBjtWjsXm7P=fM+1$lE;{~ng(MEN>BOL`$_iqk999$O}x-sD?^FyQdOGe#iKKe zMiK`QBRdM}03&SH5P`?I?g6x$pS&{v&rCG|9($>CYt z5#8rNI_sPFq`a$aMSk~fUyV@t>KuoteYF-sRN|7_5T&@K1f?n5l1|mPc{t}oZpdJ% zY`9xlLXnZ)2q2GWpd8p`5ofN$c1j{86N_@Br2x2sg%l|{+;D{gay_}JK84pa^rI|V zZiV?a7gDDp=AOn%yGm$`gMmy~;1ZY-Z)cH3s zm9qP84i~}s;~ymhC)LlH#YXkoW<|J7(zdCpOIGHPZf~d|wMtUk3UHSc4*~)dlz=gu z@5NRB07aV8S4S6>Tzv*b$4ts_$5$IX3mXDf&Q94ol;;WnX9scVLlV`j7>TNM<=Q&L zh}<1%Xt>Oj1KCQ`#FCXH@edJ_6b=Rep{xwevBr2Gr3r|W7Gl(>%GpKovciVIg%R3L3Bs^Dwg+li);V#5C!84} zD0QVW`qGImAw86q!c>jbm6C7@j^`Uw&88)tt5npwWjAweDO?h^!EM5=@UnL!2RyeP 
zJgH8<(~7YCA96zgTdoa-wiDY7tAMFV$Qc+Q^X><8gFsB|_gHrgIO@XPeD!D2mb8Ml zjtSu@1LXitFgX%;$;)NuNw+@6jYDTqn``4&3#=(Cayd!eXd{|32s!1Cql`p!?5R+$ z>`Q?qrsg(#Dp1RAkD8O`-VbpZ6=8Hod9mqxBZz(Jevsmo*ESm`u6HDd7u*&`5>z%; z0Vi{SEA>QLoD4eKeS3q2KqC!H&vrQlp73@&`R-!6zeu@khNoaWUV^oP>HsTTb@z?_ zXxj(G;MLEpW9hqt=5>Mewv~>uL^l(_l2xA5Wb70VH2Lk`3`z7p?4aAJ$l>5+z+9aVIWmrHPZ?|^z)hZgEanJ@5jmaFnl|0rKN3Cgk;|>axfSM*hlTS`KoY;PTBAxe2K&qe454V-=IQ7!{CkHlZLeN0)x|ve!N9 zbKGrYp+qTY3gJ5)ln>2N-v`gUWKtj$b~vn&w)yj*Eq;urTN8T{`q0m$Hl~^dAt)<2 zIOo|`2I=MAHal78JdVfASD3By9D?$e+7RLwgu2T9L!&vt#^8=&LvVR{+nN(uHsdWs zn);oNr3TrO=F4ea%{|?p1r3xu4n+ZBZQOC~tc~;CT=7T= zf%5P0u3+S6103sccHCnW13tNUHKeCuST3YA5Au`+P`6bRDb?czN;PxzqWNyX!y}uI z@emI%l#}-2sYyqP4T9py&#%mjjGBR_>3IivfA~)`!kPqvvbKkKv zbNRT%0As&#LGU=q@U02QcK!8`cICYQD{YQ(TAo?R#y?$ToU47SnE;vtN3eGkR;wqL zHlna%J7$_)l0*13ry6Tf^X+O$r~5*s`kq`=-Quw?NUG?l=u7NPaK;E^scA{UTEf$f zvO;`3sw?<5q>U|oBR#D}JNwFzYsQrA-q^V7TaKYY4lstEQeH`M8B2u@1SD;d#t$vF zBpuPzjPF??yQr#Vq---dlb`i2)GuDz;=8Q=5Oy`DT3co~b#t`ylSD!kcvaUW#PvYr z6NH`|aUHW$OQ!Q}7~A*aw*LUAcA+}nRjli#*pWWZ20{@W7NuMzG-ZOKQ^eb94X+0v zae{qA6(klwDc=Vp-I}N(U6mVL-)->qkDyMGaavVq)Nu0-c2b;mE({QHgy0oy0pU3n zY=_zs(;TQJ)izuR@8JXjedJYpDn_H!H+#LFJ?lx_62>~)1^=QLl zX*e{Vem`3W%YEkX&qx<(w+kcgIBmAz@@=F#5P)SAiMhmKnlRZKm#20bo*r6j7x^5 z)3GDE+XbaTytLm@AQB2!7T^hK4h`@MlBIvqk(%troxR#t>vqqCEf1`;%oMYC^r$6e zO3C4MzOf-G0Yoc40bS*uBrSEPuC1AOIcQ6-BrSPU>uwY_0wojY-e z+e?m+*izLWgd?lUczY*(fXL;MRW^mYIX_ETwC&A_eS8E3vo9$qZM1@R=XHM3H%}XFy7Khde+jBu{UzhV!0MEO+6ueMxO4oG;ZlnucUfBh08-ra z)ru|Rv;a6g+mEZ-i=omvP=C+{^VL(n@-w<@Q0IbZZw5)1J;%tZ^vYeFUXv8!SSXBZEIoY z(P+D@CNqm{qGCHIfkh_{4>8DK@}zJ&Nr`oCd+nhiauR~4m~}28u$-r~Kv3Z)J0I6f znv+%-u-bzW)w}q<2=Y+DEu@p3@srQKw8oO76yZBb_dQR)E3Fr2=Y44L-2p*Dslw1s{-Npr41PCK#0Qhjw4ks`?;E7Bmzi0)49 zfV6w^2=@v_q3tSw>h4DP^)<8TIKdz@}RY>bkrR{*jhucn8`$hcqn5WNgd-G`SPYk-L!+!LJRY44)NhDaXoWD z`0*zT9=sZnK~3evDNyFZzr{()l9dOQp8_^>*lRUjPs7(=(W`F>t3PRF#lIuE)v$P% zd`2Hd?Sp>qw*;bv^X*z@e1;)I)wdg6?Zt^v7b(R$SqV88+Wz{xz{FnReBIg#-?@uuqCk`0@kW 
zPcUF2Tt?V!o^j$sO05Yz*4iFw&z#kso-&SH0W7;{2lMc$C+1?F_dbzTAsa5F3)xJ zWY*>*x}wv2c-yq@uDDum?kBMtO4g)+lasjlJp1xZagC=nt(`3kmCAhJUg`A_1E}YAx~hY zJ4gqzaj`1S2nWaZXCtMFrP!UWi@!T*5deMWi_Ol zYso)>Fi=MQjMkE8$6^|om2E}Mw*t5$t)QiBlB2?-jva@21C?$`9PoF?I&Qp2nI<6? z`D{g$oP-$-7aCAdAf-Vh9AkWgkU;YlVyh*n>Efo=X>d#3V-a-hDegFIvsVs42~mjR zvz{T5fUG55;{f*`3Tc>^E!RmA=Qiq=+8g5tXpR7Q@Jf76`QPS>S80!MjTJu$NrwEc z5S1z`N^JlEupCMx?~|W8aBlePljK9Vw2OpB*<*n&Y_t>BXN6;N$ZT>s?N>t`UKw96 z2Gis7EQcxAhgB{OuZdjz89D7&aGZ~b{KrN!x=2<|ID6@``_!>8*>SoX9kS6y$E+=M9@QWSU=F}T?6ckyLXv&p)T~w2ti720 zj-y)-Q6&ps#X|n%pA`HKy-Wn0sx`sCUJgF0%A2+kv$B#$Jhqei=_tGEs9Ujkku{<7 z9&-yGrE)&eN@D97uB)hiQEmR_q_T{kRz?r7nwMHV@K1NiqPfbHU8f(@@{%-MdwPZh z*HfRQ$NT98$^-?q{{SkskL5DI)G6%WWoHgYfba66H~NJnpL#)IU2T%o=^XOGPuN9q z&(i+@*1uQ#in+`y`>TY$5@clBqY`*8cK-n96{qb@Ws5q}ytj0T4%S|a=tv2gt#jAq9T1`314_p@J{{V6S0BZH<@t?_@;KNjpa zKa(xL+gy>S?*nwi4iC(E3i}NyRE;Fy%I)`tzmX}meW(?xQ=`;!hwqG^Hkx19p-bVP z-G0*l0BGj0uUGl4?}(gOuPIjc**>(RKV>AmS}tHPv2K}=NI!{D{Z#VAeK@6lFHp~9 z{{YfK`>Qq9bZ8!o6vnm;;Xvg~zK<^8X>2?0YkGeKIz0#7`r`gW3|P`pfVi(C`A8z1 z8%gg6KCV>h4T*&#sn+mZ{#>V1{{VS3=G1%c2Un;qum1r0>JRwf%aA%*mvTkA{M1s+tA@n9wwXLbx%5{6^x#BAb#m&UJ~}1 zDG3-H6BeJe8sKGVg?y_u!Lj+1LHkir(c%99wm+-=ryUv}`(ygQ+AYQHx0G)=Y(jJKTek-eO z)PkNQsEmRzq?~g}2Lv66H7sPmsw}Z1Mz=<3=`&;766A+4%L@T1M^*viTZ#n<`6q=9 z@0{^dTXyX>`hZRPQZw}6pVXsLEjMIMvgsR9Snn~FA_Dl*r{JwkjDnHGF|pj>^Uo@D zN}FTtN9H;mO|kZ)^CH3a?H6Y)lHX0z-wIoqZKdJ2Tu%}>laZVq{@!&a=-*r2dOW)> zU97!xvpdzQ=NTziQXE*x;8EEtPss{DD~ALe14}ekkBIyk@Z4?7#HYX+Y&?YF1u8(v zYbiMjDpq*_F}bdHo{jZ=x)YY)NMzfKh;b^GzNte2K_KU3{G}WWjGXhXdL>)gB>b9x z6ON5L*;&5^pTxg4b?O$h=)tT0nx0*N+=RO=A;6U(AP}Uulk`+jNZjCLcT|4Y(W5dU zu6%_)fLd`qLK{+*oNfi)KjS4#hDsy0vgp4Ul z=dei1x$~+P{j4qXZc^26({Rym>%@l~eF|{6unJ3sJU|_?5s+|@IcJ1t(|yRv&rRxs z^!fY-RIV(RQ#u?|6uzuD!doGujk4dCerWc2uZYFoOomJgbh#v`tqNtOV2!dAquCkp zJo~9!T}^nnO)0Y>mLfNRQ5HLGsm0@DAn@d8nO1O-lTsTtJTVZa(3c*22M!XH zq?C-GsPieR@=em?CgSRZmAaSEr7Apk_<=pdp(BgCl`yW!z{m%S%e=ZY{kko&r(FnsIE3oY zTA-(6I0J;F@BxA{f>VGu8w!U`>dmv6VU*KTlXvmAy2UZCF7}~hn*oc1cQwwIi4oG$ 
zo^iNoNLLcLp6zk3BOqj>CC z-dsQ%ad?5x*Hyfgy4!)LK?L$1HPj%YtXW2ir5zN+(-=rv+>8pE7%Ly>$?i0laC}8N z3^0YLjs=m0B=RH9ldc<%GPw*kr8I$;0QrN&`Bz1vPcH6toE^uquyKry@wH_={JoR_ zdFR(%;YxZN#pXg37M=$iaAmhq^i(^?)(FVnxDAgw$ATW&jXXDw@kkc7Im zaXArW;331_gY#GHI5lOX2XLUV+(zKnn4(B_B$UWtr78fWNh!)!ld_Vb-jX?w{c6O~ zI_WmZjWH!IJ#9fsTuD(b!v~okO}{>4zq)|8bAy~DtppouCq`}PC*S6v^|Lj z?IxgU-DLUD9a>KOxmBak%utxO8KfR1xQ0RS0A&4Ut4PQMcvZ{MjBy`EL{|s~L0iYq zD%#p=KOJzU!^^>Qim?vQn24QX491M6Cn`94Qz^cROLP@N@qFByTu4 z{{Y?z-`>;J#mP$7p>tTj7{I1G9)lKI8&AntTaF=JTZkl-;HR;UcqX?;QZv0|mg$pl zv=g84wVr?U4%9DFT(4IBMAX?*E;bgXJj)_FF0c!7NKi;24mMmQf-(}Iag1`!O)fnAMn;wiQnif!%4Wz}_IYFhZPt@zSADjz4# zxW>F|=rW|SKTg2ZEX7h%?2AO6q2dZcg1U#u9p{sWk@Zjwc1yb!#;GI`!2M$B*k?+4x@>+fTWxySUAFg+aT2?rF82sWUelz z=vPP=Yx_?+Ug*Lkt{f6fy(w;WZ`V|>zBw#2Gy|sk^ zoR69aH6p%U?fP=Yt3CB-fWiT7i7%rOo@^dnVH^l^8*r$CNF-;vX<=14rwKLTd#}}f zUKS&(MLF5DlgnRsjl^1WP)ARtv2b!jOt$KJsje^&SfR{1<6xynyNdwec+{pLYUdtE z^tf{&KG#%PK@OyYo*<)=6b^Y{V}5z4m+q3;Y>z7IU}+O9&9;Dj6Nt2@6mT~H;Nu)b zZHyX=yQ%CBUHv9XBi)cvL%#u+7OW&FaIEs%bCH8oVkkU9OHJjs(?8vt z`8vD8SJQD)S>>tlc|R+R7Z_KFv)y9K{n?Z&g`(tnsfx0cDMcwcS~f{h+b5lI)RwJb z9@xa2^vjF!WIj~Krp_-XtqD2c%81ViKPS7@Rz97Y$|4peH|WU?wzNc%Ax|M{JXt~l z$si|$9$4*rx4K%bmj(*cZiguq|J;_5j{E)Dlu{{ZE+mgy91sLShuooIyvkV-N@t9rpK+pIkl#dSdsO0?YMwtzlc ziVZ6OZNCEC{{YgOE?QR7!(^sxn?`bYXR@Z7KhNzned4v(x<2F+V=?S<5}R`;u3F@& zXRJIwjA)Fdu&feB-PNgLoN|)*d;VsBQk-bMW#k%68xpbanN}D+KtwU+vtz5=Oa2N-C(y5Upy9LJG;CL-KQBsc* zJhQm_DgOXOUYdsNJDvHkl@Ns$w8<Pi)TKh8e)c6@?jgBdQ-qDQO?`qQam8E|&rgqC3na6)3uutAw}* z3ET~pj(z9Gs-1NYwSwK%e=kIH(Xh?m4_Z{-$#vuHD^Jmpw`JU&kQ=ut6HriGFm5C) zr6ZpMitTN`GJSQU4_GHWT6LKgiS6SJXSALEXg%IxRHgOJZbF<}ElOqP`o$_QWTVGE zd;FYYqgsmMB(Ru{#C_fqvxrJcnK&JzxFe9*R;(^Aov%)#jrU*SE#=e0V&&GWNmNVk zQ)*AmtfamEBlEW#ld5*x0`qRZl=f8_*AQQCY>vvvQ9si4RC-O9Pso@49JMyDpd=EB2sT3m&aI^+(m&OOFTNy3k)=BzCxGGtavS@TPb;Vnjp zpr!G4un7(Voa4hA)wr5kOHREdHTa{|d{$nY$|z&`x1mO^zq^?;3Lrs4J+RqS+@H;VTD)X9tld)mWoO zmG4Hiye7TUU+OkT6A68#Jodr3zpn_ShijwGB{L8 
zs5IMbi*xBhEi+Qe4-+piq`00Fxlu{NfXO`er;SANL?uC!ZIfg2iS`l*P=fk`+T!lml{Qa+$yb$4SXZR9t5VDHM>WN3;EMET zH~#=_T~G4WNlV)K%KikCPTyQ*^featONs2Mher-@ag>e6hboY0Td19-;=Of=Z4a$Z zajxHh+3lXtBVtc8=bEohrM}x`raraOsyp!dd?cwWPW!1z&k^<%f#FjwjuC54jfe0M zlB;r*>Y7Xc0K%s(tB4WabDmk-8=iHCn8R0kRNj6L&+EeGEJSJ1mGO9KQ*ZA@yFVM* z-+Hkv5u-o6NlDMqb7SxS0MeSZ4Z7y5MCKy#MwaAtqBG$3Q0Y%(ZKxBv~Si2f}(`DGj@J<7=CPU>++=_#e` zlr+Hv3>=WW{hw_uvg$is{c?Ji*^?chfSJ((lG}vmAOZl}%RKq>t(9#K++{fqQ%@;v zv?g)DAw<+YiL+T3Hwi zPFooVykrsU1XR}VsJi$~v#4$DUZS_zO4hy!ERW1|Z|qkP@}|i4eN7LfTfn(Oadvmo zU5ZIbS`>HzC>i1a;N)^R9#p3HZ*jIfA{%S)>2n5o*tx9bt3#9$%vJu z*|`HNM_diWr?y}{hNd75@DrRl>p>^TS3bU58ILKVpp%6(`gck1=eRW+K~ri36(MOk z7&st`oLTiqxj{s2Q4C$s-(fO%_$9VGu;o8~>NO_}le+rcaT(O7soi~UV^MAk z=O|L6FX*dud-;#9wSF7TBdjUnK>5izAMW&xq5$y*J8J68vpYC;Q7vX@YPYQ{x5 zhuC}kVUQK{+t~ZLR;q{<+KlR51G|!kvbxZc&N=ygT+s@ZVyzdXwrlj(yw?$g!IK!~ z1dOrGC#;<+6?OGr74!FzMPoTmJ=bwMQWF$to#cmItp-AB!)-!$gW)CwX(U^ zrw-JT-bF3vDNn1NJF&=erI0jp(c1!kpwsa9b}2KG>W8tgs&r=4rQ@gf#;IH9JEkNR ztLs@!M>!tjj+0I5yYfx@kkP(26@ay;94k=YxYUEJ(TonnJJi*8^^Z2nPp}0auCS}^ zostYaB>)l0Ql53=?&1ULrkn0ebMpJJev9LAjr)bgm~R#)$!$Rdju(^<)M=_c=UmKK zYSSwJ03;6fRO<45nWsDVNf(&U7}l0;kO&>6$AHiy$Px+q>NPuBYRTy-lc_F?N%^8m zlAmxmsZ*)l?<|fZ7P~obe=mtGiyNiljyIDnmWMT53r&yyZY!$38$iwDRA_d9-AQo_ z$X~gp)%Mk9`;R)Jr<_}g&T_R9e!Aud50y@pU&L9`F|S=`y8i$c%SYi|8oN~_ay}Z@<8qs>vy}7s#g2cZ_eK#$y%#OTVG_x==upjaG`E1;0j?325hIi~zJiOTUbxKGEe+ zi4JMYlZ{y9TyD|kJ1dGodLmm)_r6LN`op6VWv4q@Kp1THx<=xl&dL96_N?;_oz1g>I#kZ*XS64{3=vIp~H|?k?O73kuq4K_nAoiLV&TtB{1_lVI)P#w% zT!#oRs$~a>z%pDUj1W82cSr-rz-FtrBzjbpUhS{74&v;kB@kN1NK&(bla=Idqrwxv zj@32Nw`jM!a-z1CMY*z|r&?0ZRN(COgFe*l%ZnR#(0Ro3N?6u7k?83xHFJOfi+#@|(c_4s5IU9}0*d=-IX1GI!&UCFl_Fihkrc5R4VL3X1fCQhJno_~_6}JfEq2Lr zy4+E6)K*)Rlsp1r!j9)wo*n?Cg*u?4j?lnLNj;)S1AdO_4MgjIjtRE5(8>_X6ttjy z9wUN%)nI8&cc`rK6Bkp&k0va3(v-+)(1{2G0EZKftehO|q^t7{ExUBPYH}LIo+2d3 zK+CAOFVVP8al({fxZ=C}DI**43IY?NVoTHaRY#cQXc4E#9C8~-PYDfxo-*PVK|f0% zZUF;g9<>Fk^9QeuE=%SzoDZf`*1>?0#&{C*ML?%3Pw<=|aP7mI#NB5F)+9o{*;yB$ 
z`1EAk7+FiLlYm!|lqe_>f_LG_?3zWUwH~6~umYg4o&QB9$XMRT&2-MnoHQlnf%{S?5Pgs?s0WWx-U6qdU zo$@(mt&4@N8j;tf>1Zy+TRC;LE!fCy=Qwl1kcSirR(sL`!QYl(bEBuKenfJGak-Zo zNiBy?dq+6zIFp=%jq%;g3Qpx6=(1dHSXo#ng!rF2S89?Lg|WmzCt!IfcmDwCQ;kb) zV(*nCvV^H5p~VoMA~pzCPRsK?6CfO{Zz^)9G`GpMe~ai(@$II1LZoNVgH zhj<6b_*I#7ljydY(#~?sXB-FAl1JBy0*2N%11|phzH5t9p1i2z>H?dZPBDR8XQ;03 z_jo}F^eu_j^+m82Ohj_UV*g5OdCu{X&`kBA-sB$KV4B)DCTYc*$8ra zV?g6muI4iq%`=1D$D;->{%INZjwAZ3N)>ch;qTb%Jxt zzJS?BZy#M_C%6j77Ee99s3&Zb<+TBY9?yO}>rzR{#wbxEsX14& ziouBOnrU`PZ{Xl@r0-w<01(dp@~r?tJk3@A0E3wS0NpFko}i!o>8n5uwl%ssk)7)s zE~NY=Xh_@l%bCaemnt2sW=@H$OOe#`(hKZ039+bJj@pk3RJK(eYQl#bF^meB z+{gG{&#=d&I!Vv{3hEannX~nZpObF2w<#)$)YOad%S!xO)zvhp_&h-^jD-#^0rE&S zBoDK$^Q?t8srE~UpiOB8V^Lf5mtj1SfK-<9R)r@3lmmj0c_@&c-*>p&5ay@1S*Dn( zP;1d2YpIbTl#QH17Df~lRfMY?NzZ=>semMMIQ39Ux6V(U0bFW*aiQOvXq?^JIle)h z&~7jvz&u|Rr1Uq@WT4yOz7?jE%LR>spX=&=#fu7)`6OYfR?bD;pFR$&erRo+W z3&p)Efc(hr1UTnZqLLMmLV+MCWMl<-&=qe;5|`3XOVI4rr(N_;B*KogjCE}e)6R@+ zHU|gEHFxSsE@(|79tMfh@+Cgvgm`h`MkOdw87?}ZvPUvhqq(eUEg7UV6h~xRB6UtF zICfl@c->M*&B^3EfCIj(oqN*c>zuV%v_y9$NO4_Qc?seK^1=tsK4-E#=mdI7%&SY6 zjU%gXaE04jH1;C2lCMQ~gyqF7c1nthJ39`+DLWdl^v|wnJk;>ZY$j3~HfO#m6@l#=Q zwY_{}P&0OqZ?tL@nfCc_tMN?DN_Zb>z`Ot)Z-qb{9wa&6S7FsRQ7XNkuS@Iiv1KI} zC8wyK2jXf-3yQZcHusoy1=iVou=8YY0V&v}L)HkY{{T|iZI&0FisY8(vDK;UGP`iyP zmQT6eU+cQ#ZPwFhHVrj)g4{*|=RrfVAeEpv)5JDPRC`$BaU>JI!zZ3&7kI5(_0{s$ z)?bw#VoCOqlsZMLn=l!qC{Fi>!`mes|R#goi+r%8rN8(Pu$}~QwWUgi4TT2 zR*n*sr{s_~QoF}-rna?3n$0#x!MCf;i79n>ag`F&h)ReGDNfiRXB;P=eRt7i9>lHn zvo+{?x>25P&&N~U+Fs(*>n&FXk;LFGp@Qr>ZP5^)KpThAQgk|&00~UuH=-p8b!YN+Hu0m4xr#$jB3X9XsIo+cjJ8| zypu$0PTjQ~jygDmvk(B3^=b~GJ?F4_dj(Ey+LKA&63VAZj@0x4#M`-OsE*6WLw@hKl5H42OFZC)*UkEm)SX#T3T*uYnQ3*PF z?;hkHxap<3ry5GvaDu)xIIuSu1du${ihH_2w$Fh+9Oqw}%(L=XbxxK>auvb2M-Pau z7VACY%*1K-H(69%l;tIHi~ZOkSwhr2cCFQEQEtg+;Pc_F?mUVrx{o^I;*-hQ+uF62 zyjmg;LS{w9mKv26;sVO!zLJN@NcpG8<32{5-iazC6qG)O$X?{0(tJ1hIQ4>SwbuJ^ zPN0BwW{-G=~@XuC*&micsA? 
zJ04lA&bz^_I(2Ps?N5fULw!trN|0SDDy)ApQvF)^G9=Z`?P-$Z7YL3hWjs!_IIu_N z^Y)H(^ozZ?+Z%K}1=9Pau$Ld|P?c?fqmPt%4ER&U*H6u6T4ymE)0Eg&6pRu9%0h5X zRk8h*Rcaad)UHO2afJ?RO2Xk!X-p^{?gs}W>aMXEr;VW(IN3b(qjh||JafI)uQH`a zlk4YBQlj3+rydWj73ANM&rl~^XshhkBfWN2xyA9ecsj-p5FLd83IQ9ATjMyTx_4K# za~*8UG-Wu26qyhlG1imbae|!l&INI`#kyIX^q4X(7G=12*VQ}MM_=-W<9zqjku%cl z1tRUIn2;PvOOl{rr7JrX5sYu-4OfSFoFo*XJNU)ux4FF*{1lt;G*#r>QjOuGReo1@ z?7Kb!rtdLsP@6!!g4)jmXt_7ls9+s~smfK}aJ{?nPFDEh9lfJY` zC#wZM={sIOa1>9Ua&tSu}0FH1xR^dOFTG zqr68RTv161Ny3K-9yO|XxYm-6CYo}3uAC=t43+%!Uzw5^$>H9Pl%U^py`P$vzajCL zB|4rq9*J^n3~@BDhY}C0;Ci@)AiZ4VM@wmL9N^;0(xj#IM_XW^fT7kYC|(KWlY@iZ zN}i1&ZuO3)EY$;$ty?oJ@q@{t9bz>?m>T4wYQ(afQX}RfnQcC37n6qDF5=aOg zM1VQ&0UP_zl}A}-3|WpunQOUP-ykc2M?AampbvkINbWjj_iiY-J0jf@A^cnA`6Gd2 znB5t|fzAlr8t($|n5;Od+MS!VskcH4Pr)*Zay!&`Nb)BfxmP^xSa+#z^J zTa8uapA?Bv#9OJydVw{HY=$T&_9$u zU#VdF#bs;g`eebz)zS)k)Qojx`nyC@nHGrcH|Z7&6brja->gjKy0rSx3Ql~6Xz_IQ zPAxwiy>%VSlyI`LsYS*3Js(*&=*EfKN#(SuHW!V}l#V4az+05i{N|42Z_PYHqm9Ox z>%0s_eQO7j5J?BwQW@H&^1{mqiyi{oKOr&jq>=orkxFga-c!Iaeq6^{SNZ7AmfTO7 z8N$6=dh1MFt#oKgE;o#<{8z>|zX7?M7NdEjhiuO+uNTwR+4?`PGTL-&RDHO~cx3+oQ%}%Manp7uoLfwz-Gz}``VOLu;*|g*V;$*b)Vh*Ck`6^T%x(jV;knynTHilC2q7NS zo~|V!#>8jIJzSN%>Xx5g6cT-@B9tRj z>PuvFh_S6fw2TQ6?d)-%GI)9WsT7M8kdpKjw>gib$;yYVJoDr;P04aZcL_mTY8w&D z-$_Q)?voq^KmPz;&%tE)e6GAiFz_E( z!5h=V1aN{gOlWkXVv_0<+KJsO@u!&lh6O{5TFNCC7iKF)`3l;bH3abulGAD(a>n19 zJZW6%h=&xqt`e+a1H{xeL#ut|y5{O9Cp8Br6`Ze~XFaMgka>G37~8&p*+(NZZ=59M zTL1v>teg>m25MEK^zvm|B;Mr|xRyu>j0g@T82%K4-Qaw)Ko;9ecIZvXn3qh|9zLxb zb!ouDe95I3=@B@ddJ?kF%284R2{_+xD(54JDP@;S$>yS?zCUg$`=je5c@BQ*FO2^H zgIVpyVZnsRP~0t9`GX$PCB@ItiZS5a#v86km}T+lnfvr z0YG6g9ODZ>+$bF6gOSLBIUhQlr=+{+u`_Ru8WKPVGD~}r%#uEtcocc0{;^M*5;d~te@t2Li2O(;9A^bYa|!(K3IWdc>Mk}%AHOb4tGmkK#{uAo z+DBz6@#4-g`!_{nnQ(UA&8<4MHaV@#OUZI`!Klk%BOS7olH$^H*T%m~E#qeL3b1}iAY*NtCrG!gn{(EWNJYUbJZ=OK zqn;3gR8+1tm2u>Z4Zy+N2TK&$nsF@L)d}b94$TfN=2=Kog)@~QlCpkB7zIh>cuq2Q zuJbKY2f^Bg*tY#RzVpg$4hQ(2X-zB?5I9_L^@>qSQW7{02F8G!Y3**S^k~|Cd}b~3 
z;Et_PIB?s`Njz)q9EIm69LD_fnwDvq309$q=b$2T7zImh#VNk6Bw(wLyLi)u5|g4k_^DJtQ@hvLRL<2~8Is^(Re&2YBc?z1m( zEivXvTyo1CC2Dmgd`Ll1;mW%`r5s~Ba<4-=lsbajtv0k1x>k@!md2Rx+Fsdlk0mI9 zZ(7~Ug_$wZ;|@810+gI&9f0CV2VtD!6a{-`c@erohi;cEsCD%vz?rgA_2+RMb_LO zwp*e@bSv@kq!Q{7w2}zs6b}%`XCuy{k6CpT36{GB<>qZo&UiNM*(p+1)-vKljwyV> zK}t@?v^P222L6t3OSDdq{OGu{GN6ePTXeKo;#NX|DIoZs&^W;-0;HPG#DrdLw<}T| znG$p1g|Q`J3qzqBHsJt{EbbM9k+$OotPM1qPMgr;-Kk{QkaPAg*h(T zSxb5AKqrMj3<5_8DI+5X7}$Abo@_?0ksM{>6b8w0Nhwmd`6?bGe|29vTK7JC@-&XK zf+J6HoI^6V>s5o0mHgqbJ~+;5tk>E)?WV3ClJsQyJBI+0ID7ji1D5+9V9*sM`q1sN zVww(h%%g<$fh2I`X9xE8{X=_v;sQg1B|MLasYLua6@6hYL<9x4c2WrDm3Z>l@;`Q;$2|8_oOx^!o6)X;4|O-)K^$;oE-W__x$s+q!@)lP z01D~Rj)Wm?UvKfwK?{iu!SZ|@aCe^s`%d*|oRUc%bOy>KoRw!MnWjB_`HiFJZ`yRW z!1XC(KTtIGaCw1HUYX^VH1tLgcU#MJjPnU>f%U9V6zdA>U0c1&Sy#O0_0r@V3{ogb z!f*fs6ss900MHWNisFe{;wy^zF&K77sNf)fcxvQXAOYd^RcX+SU*8vdff)g|qonzg zr7P@D)mA|`JhS?!0V)KX?OSAH&a_eWQN`bljRBs0)rFrb(5u6ltYq(+17sWyM>U+_ zW6!^8=f&AOZHhR&yQmI_Bvyr3tSFou4EI+xXF0(X2SgmN*+pp2@Se}vMPS5s%{03t z$MAM1_a}A(Kl1$Z{{YQ3XQyGkR$qg;;nFux_VoOInAMbb2De92GreL9ng0L`T24+w zuH)C$Dxj9TgbRaK$PyvBJ6MTKu4}Sv=?+9Ga2G*vwh|ji1t?G?s084Xy-u!MUM%?k z07~!oN~bclV+Cp>!lZyZ=OpjG;*j^Q{y>Clmu!{{S&&TkfEa79Hvz^J&rs z{y|(SK0wxOzu^5~{B;rN_h!8z1Pzk=K`3x~)Q+s<{*7jrO^(dCZ$*-jwp~JywD^KB zeyWbXnYS%GdXxum)>#~El^z~B4Ga$;9ce^)2Xoy(RXX+lk+sF{WvVuuySENBg*E8u zY@rL`Q2|SFTqq!oz|Js7GfsL-qOE@wfP~J|+GR@zj!GjYT-9fo;yFrxnnhjqdxIBS zWNU;~scc9x?jyK^vGWyEUHV{`D^62Dkh7Rpu zl@+NxlA@(6v`zrnXK-CK@;XiFCg7W-S?jRc$tAfsC7?ewCk5qsp3ke5W0Y|!t>V^> z$NvBY$ZvHj2EhC6#O!;i@Qq>w(8qy<)kHYSqJfMtsmypq~j^>Ipwen_sd$^)HVb{_&VWoL#!(Mz`;RE zn?@5P5PJk1fQ6N1Cj*hpxR)|_3h~aytmmg5)E?~qPtfN=FI0?>IFd1xgT8ykR^PaF z>t2nmmNZC6(lN;Ejz~+A|>|Os3u-C z)Ar}uxC9xt`-6@$lJXD`9&N-Wd?cYMN|I7R;7Q0i&rrqFPbpdW!WP%d^+l(G9%yCM-c!XB4dmW(BFFWqmHX9 zDvwEiWTlq;_%o$hzO78xicc-%Lrp5}rEHgH?tYyzbhHNGN^I~pkb5aghC;|HP$Vd4 zmH^zOT*RS2iw;F!Xe4Y2LbLYO(Q=J3wV9WcTiINA$V!`tRHsQ&xPDk8 ziyQpVd-m_C^?O=u&RCNwMr-i~O={-kRNXWK=LxZqRW4 z-6XhYdu_Ftt4Y5B$dTTUa9KxC8CZR;t<d3X2JvgGu*klK$Ej!H@E6jlp| 
ze&SV~ygaG}rY$eYl(gC}@$Se8>pHM_9!3b?3c=;Kwx!nZPm6IuItxBc;0J=XIU2(B&aJ61kE9rPiXZm$-LzD|`uI$?;b+?Uj1BHuwd+)|7@wTgPxWx0fhzkgxr&2;rO6{*Hf}`SEUs#WFQ(aDVF4A zB>w;fbcPg@V%Vy8(av%Bq!RCH)$y z_&ZA5vV3G`rCy94ojGW~%Jpt`&(2Jgxen4D9|LM+#ym&&ywxeBocAOw5aITLSzWf_ zZ)`4QfrYlB9#fIPP82?IBPa^<;>|=In{$jvnQffXNz03l>SZ^9-NNP+2mN8Lwp~Kp zH&DBE!4Q;5JzH<8C`j{Z+%&a%l&aS$xl_@p1ox|Om6F`sHPo!?XI3$t``zIu=QX2# zG92HTb?b}pH5!q|t|YT@jyQAN610JjWp_o+=D6u+RcX{*qo;JZ+i8~2Ir&&oD+l?< z)k_-GR>z7?ptYd9>UMp`j8|L(y-p_yQ1S;9>kclJm>X0Hk}b<#;bB}UQT2|=9%NQ^ zOASf&FG;G_?1(cyE`K0(b=;h+$HaFQ(qUJnA1to{nczTq)LcL|TFh)EE zYclkU&Iea-eU}n)P||^IWcicJ-Ak_;URv1WG2vfh?i97Oo+147_xh7eolL7*!%=5l zZ7WFGmk*>f><1+y*edNDL|yu|Jo=L2x}|D{lm3*Ht^O}Wl>Y$!d&Y;XGo-1;B-$k; zw~@mlHc))R#>9JbNv%;P+M*%5blFz)jQ%MuakO_oHC#jL9=egH?SDgSFCiBw_#wj$ z!01r$Ejh*R1vH}Daj?5?Z4x6i!^9pWHra}ja&|%%N;96|u6eE>rz1kWOCSo2s?~Yh;pxX_nllRjktr3Af)4rW0JT>2_Cf8 znsxa)bJB#}DQ$OI`KKkuMUcY1RO%|1+T3LP5|e6A`&wQiXj;<8HuWyhXWcE+qJo#0 zT9qm|85`i^ZN}stbnKW-PfuA95htW?$8LXxW7gWfzy(1Yi)a1XO}mJ4VcgzmqCy)< zQ%-J@R*{VN4aaB8H0#wxl&&^hX-fmMC1N~+9Oc)Njl9fn^oHpuD zFhWNPljc&dT_A4rdyLp=a?v?~{$g`s(dq>5E0k?ZU_;AdL|K+sPs>y%dd5~ydYe|n zSDhzMQ*J-pw3oFTNpD9Jqlc^3l{I(wsl5jGB;x&*<~YdGkZvh5H09zm?VO&XLE%z- zxVInD;iemnrukxVFG!0j8iDLQobaDg#>d!dO9nI<%MaV7+vhk4DO6`Y75@Ng71%zc z;+&Y7e73lxH8Y;-jXl1lOZ5^;6g_16s(Gon-YZFMU1jOkTjI*y8j)YqsHV10sQs(B zag=or4DE|OBRy9hVZk}FR zx2;QhMrx6c9j{GM@sXli#&>Kk&=akVeg5ZMJ1ZO?I#e6iXd>g6u~0B*XW zOzO*O)gWwcFw&OfB>1g6CC_dZ=SinP)EaJ3Q_<&Jod*GXq$|{+vGy9oy=b`z>U9mO z73Hyk#Nv9lfbb3{aZ`G!UnwsPpUc%2UahZ`merH_dZC)_q-J_MmrDf3ua@TfOL`_l ztPlEDk8K>e;HS=*nba4Epdq`Af{t7ZUmAVN$f;Zgw6pV3H%@9<+G^yJlqOj`0bT** ze3btHC{v~C)a}8pzgFqu{{RprI)cIwvail{wDwjr--Ij7*HOhqJ2z+Jm;CN7qmPv- zSxLq4f4<@mA5yF@tbW&Tabuf^Y_hZMY>IGl{{W|M2;jO}WFX@KZMc<@^#k?Nd$x<6 z8WWd0M3yhMlzgdBi6x=`07}{IwtZtBW}{~q{gtvcrE4dV=|hw%wP$HB6rb)%J_54O zKj3@&A!_8z)E3qKAP6<9`ZH3pfa`YU$ZzJZI>UhavAsuQwBxc6%S^4u7D7;zrzJ$5 zKn`U1SFN-LnvX8iA{w7xqeN|(tT@_-T!p14i;J30Qd>VK$H#9klrgnoR-FtyQ=G2z 
zZ6ueHZ(B!+%DkyKRi%c)QI9!0ySYh4DBA7iC#|aKo!vhF0R1b~H%&2nw#rJFX&0!WIa6&Oq5lA0PZCeCZ{RDMo6&a`jkeUZ`y7)NGKm4h^*MfdpSsqTk zM|IYKn9A67uy__bO{>;>y>%RM916xgyv2J~UV+n1G(%D>IQkDT+h4Y$m#&B`a~|}S z>9~)OjAj$Xw5VjCl7%D_zcMrKs^2K$R&&BhF9!$dq;UB|87{X%nqCv7C&NuW{)du? z&muWdjr^<8SDueFWw0PUq-Qv@lD3f{z_!PUN}L5>&Ehnk&(LO^y~ZwEXI)?0wAhr& zT}SepC<;%sR6bU5SDqfC{{RjD0LtH~U0zhLL0-NNv{(NC9q;V9dhRJJ1*z%kRDl`C zqqUOYLJGDV&fUYj)S7&SKGG7Tx5U8U52qv#5=VLDcJC3{9S^+hw-B4-?cjwip$oQ@=lOej@$9aDpJ5}FMJTrwFAzMuMXxGk7bZ0sQ{ zFycoONH{7; zJ1FlT?yNk^>FQZhPUo-(jaar$jkO{jTaw@*q5ug!b861~wpP^Fe$d*oEx~1W$K)Wd z47|bu@{!_{6>#G{{P*^1hbmxZ^kZ5r{{V9Nn<2|wol;sb#6iCaN9IS-X}Ge$amaXS zL0DEs;Cp!<>O|~O8cLNR1h8H&>kq2rTt$SxQFZyJJi- zt&hCqiBluUWiee#az7V>-#wHL2ss>Wl=fVdMn_oS<>Z`|45NnN z+}$`54|8sluwR0X%H(i)`VTu2t*hs(dvxoa!B&`XB*~g{#TgBD#yRl=%N}*;{R^%y zx(7##e@HhtFTT(ogvwr1(-M!M<^~nq0nXfs1 z*pe;tA*4u-+peVb%?`_*kUJ|u#zs!YoThLmBmz%9$6<97QM)vghZJUeW0bb;to;} zK?DqAB%B=hRG!Oqfv#pUpY-^QQK7ZG%8t+|npi%eBVP z-d<)qJ+RRvp=(mYLF|;|r-tBHnLixl9J6jPTR;>P3- zXB>tAt2H%|`wWTblPV*w#FV(G>f8ZLo|}JlQf;p!BG&T&hFEBPmPkJ(Lpzg_qH~ko268$bX=-?mEN%JT zx&bV-HseZ?m8AoVJjeWEr`;TA$2#Wq1^t4TP8S*?wg~WA$B!Zb<}!;3Rt;b zgU&4k7|fv|NKfaleb0c#2RgFj>MKjfKAM!1&#g+6^^rh2jm}hZeRZ+hY#$zUmORLk zGwAZ;Kjeo}Pq-THPl|E90l5U7`|7Fb#!^)(EMTwT*Z_?8tb=7x`dtc{{RB991xIjG2LB~06a%zjm-gWbWI&G^mugaWXW32 zd=rEZ`=+I2`S)_Btt%VQ^u4kuP6{I+tNBV9KUp;3RIRZ4X zH6$4aW8*=`Ebp4o^YN^c*`N);#&)JFqW%fKN4o%h5?>5Sr;u%V{6nX}WR}hbPZ~3_ zY4bbRDGf6wE3u>?ILpW=S{2?1!Jrx~G7X1QuGHw@wcXrPu#R?WP6>6NAg~LG^A7%P zQFhn0W|LF>8dzfemMRc{@+3(6r6>1$0Mp%c+!!?t?`V7 zUzs@?)%8(pf3VAw;W(0a)SNzM*|4+T3$eX%&12I^>3~ zTw+5DAVfJv7O(*YJ~_uObcWv5-aX7JocLEQmYr$l9!#Y z!Tbs3$)8Px^Aa3WE9!Vu+ld@#T2yep&vEh?`s$&$X!Tw%&pP|@ZgJR-xgs<z;vlgrC`!&?Jhnzox?+`d+2cXZ^PN`G6<5%fEjyZxI`+wIQ4{9>A$5r>Mu z5~Pq;I@E<^9C0I`EP1-Pmu==V5-$n2M*svoRluhalfnyX00nsGkfm`$GG0fa>x-L3 z`~30gVh<`SRm;-!W)%`6t;F>JV{DZ)bz0IzVt{>?@fw;;{MU zYwby^>b#}5sv=waZc9{ zf*xszT9Z%7hbh#RmYeNxBrD}8;Rl!}03EmTsKwVt=?T(Qm^z$8kXA?I$lyKLNmmYi 
zgx06avWvDVmiE3X=;jlJT%jnxCug}nuc52%H6;$N(Q<~&wFbj(A=1H63Q+6QQM)^^VDo7KJao4rHWv5^{0fOgA2jwM0~xtxvHvl6wW0 za8xng4`=H%$m!M^aZ2l>PKtncP(qS}?N4O=b?8y$oF*aNP2a}SOO4vyW4S_5mYkbj zYU|q0+oFk_4w0<^c##rSwZ$IOklj*0uy&`G?)H1+bg(b6C$^$bQFB01{+Z4_n5xk0 z1V|h=dt^Caoz&|I;`gJ3{j{>(2S{B4Cbte`MF%jD(#x%q4|qH$pRTX3hQZ^rsR&ej zCAqyOw>X_oLlK=wDfsJrbE&~1Y~GBwFzRJQjdQgXWcAj$YFlUdxhEdcPVlW_Q4KqD zgL6T`NRZHN?04_}`e?C6Yp)8nOb!4BP{Yb;&ohN%?KLWJ!(!4LyIjlC5;#M1U}UJC zNlMegGv}Ol>?_Zy$S_a(Gn_p7@%7S|>xq_FdJ%Sn}m#cgWW@@modlgO7{DLJN6 z(0bhB7Draf!cwjIa42yAJFq=OQ=3-Qyw40#DsBep8o)y%fyPSE-)d&j9hYM z#@L3^a@>}>)F5~ER5l)=U2W*Uv2Twm*y9snxX@Dtg_MGmzCu;s=S8RZsl5LHO{K;qQl(M9#piba0N(h;cn*-Cj-|;}!rKTb=QxRO zDT?B-kTSJ#C@kzZB_kLZ+LOt&+8=q0*y>6^U~3$Rx^7hv}MIv@rja7 z*HVjKymonBOolmPx;Jx>uYw+WHVH(?>qE@D%jh03ujMBjbEM8hj^c=ZGZF%JDRns` z%ZOn~_F|u$wOU2t=dinE#}WZ`faD1da@@F*l(?Q<%BOOPO6<)nU6GKM{V}AW#E;E! z11Hu%_0>%*G_>?*MXvbuqsBR{=)Vn(s|!YjOuLq~x-v~KG?$qMcG(fc;v|`|75<6b zNo&*z!iN3XrW?MRkn`E<5c8w5eG$y4k=@lfg%kXeYQnnPZQ(0;$hcXToDWEg=~2=A z=7=NKrTZ&WY<64)2y-A`W4C~z70}|4Jb*k&^#Xmh=~CrM|!-xT9NUB!*z zr=3b#FKja77VRhJG`tFpyK|9l9Z_%vjg;G&Wz_!wHCR6E)PfcI{{U-GP}JvC*ZK)5 z>kvn%)8w>GZ}*z|xky4m^ec;9qz@pW+4dzqx<>RHNk?g!)^&KYuDOi=03l?L?RzVQ zkL2o;{6+Z679CBlW~!6!QtHc&N%=ub%q>tm>6tCE1)R1a`HrcbY#rfc1!NL}{3SzWX~=O^Kwq$_UZt1S`c z2&h~O9XY_}txtBcJ*c2(*rtTm}9Nve^Q zCF7s2QsZ+sHAYWMLeFd-kD^XHqfL2;uKg9sP~+3rn0ElIa4`P>?`M=xa(Islyp?P`xAT6s=}qY_uOXxT%^j2`-tygD`3$tC&b%XC*M3uO!`N5#aa4J?!1o&)2W zS$flYN_i@|SRJ%TUPyl0uBj_NO)Qqk^Gbz9?mZXFTbR?As25UCVmrURWUI#rUdjW+ zDAr}oU#i?6rwvQc%~K7`+D%HG^*_4&o^iL8g*L8@Z#HdbY<^Ho+ge`hkvbZq}k_?zUUsP+$^V~2Y4w?kTgQA58>gK}zg9h+&m zk`SaJYg)Eg&c_@$$Oq+f@$}Qs-h_|d^AwaeozUpwNXLhGtQ0Cve5F>ST3Qi|a`Edt zpN5Zl;^Asksmj%=wWZC%FuMHLo5{VJI-kjPjyRur&g_H_Q2zjVHKo$q0_PmZxI<=B zUq!?Nk1^nU#d2ap(VGd86?{9Mv98!T?<1I{aiqtPGRw$ckUNiMdvx(MZ^EtcNj0JWTikWXl2gW;NY(hmz&oH43w zk4Ip-ry?!(1L%&*vBKJwx|74W?PCK2x~I~fLkR@sN>RcPIjHywZIyR+B!5VD8stU< z^{%o(>c@`>-ANtl9K11-JqhBaI%*O|&T{>we|Rq2r>|D-4@HMABKI9p90&R)qSxMy z*eYM!&5ENYe4Ksg-Ei 
z`6vGX+k<4;`IjZ5FIZztZ@o>tm8Tw}Ge{;cPCgrM<)+#SN`jNb!du`Y2O&p?2h4a^ zn_09%vQ6T(+61P7DlIBA?Rah-ZwpDsc~ZxfKStJeZX(m`)>c;$zYBNSPBce=Qq-j5 z)&+9urn1c(OAiUGsp&7u->Q0F0_pwCX$euEZ4j?1OX}iMn7YYYll+pGR8l;KD}#Dh zPeG7^tL5>A%UxO-matNBoGUADjHqXsR{Li)UfWuqbb^Vton?AZS0p(eE6N_VbwOWU zcKCZ*S{h7)DjbXzHsa+%Tfz3nA`*uNHs(2G_f|P_eiCa&RN~i)N1nM8lB>5)r8VU! z#csRH$2HtduUbL5%^@~jYMXnb0Y!0Ik;UxdPCmNKZ4anhz+Lv&Eddw@(AL_C@gyxn z&tN^3Ut4-9rJCa`G!^D3Lazy0g(cxv@qLuhq?LSHMhZgA_$jsO-7VMPB zLr*Qa0Cy<`eUs*sk6k#(()UQw*0FAJeAJ{g5~6nS2XjrbI%Cn$r@0R4Y1G!qErG<6 zLeiZ70I79CmqqD&qlrNwy<0XOJKl`qK=qZjN7q$<)BK*hH0|XnFLOqJ`dgCMW}c6f zr4_+RBB8t6qPaFx+K-|AR~}GVO5A}3<+AEY&ly{M0Fp_`BRse65s8;BiQ>pCP0`o* zu7SE_CDxDjofYc4MzXM6*;=f+z74@m;H#@r9V6D16r9%?x`OX0EtVW;Hg*pd&k)<_ zlAt|jKB|8tq_T}liELAqz2p9zpt6N(v0D{M*?OL5v1tB{FVd45&w9Picj&UwRy}8_ z!1mMiw?Sze2BSkv>RW?Pms(Ts{W-9S1q(?+7s8Q~#lzXoOu;tKjZzm(OE zam3WWj*DD0NAf!H&KSzK@o;N~hGzXFw|dhy(#4TYT6*<*H&WbF=}x+^N)lA8sYOJS z+;Vp3NgC*#*>_f_vvrXQ+M^aimg(y*%qhgDg~hTsN;qFji7HV9bxYXeCUqB7M6>D7 zgms1Cq+DK%(t>5hOqmYIM+pl`Q@Xeu>dJyh7(BoNppD=4=DoZs7e(5BJ*M8n>FVZ1 zg4{*2fZD-n=aQv0!Nl{+4S=Nrvy9h#i(a*SBB@qdO}SawER>pBvhMCJ{vO0v#Nbt0 z^U`g~&dFq?(!H#^yM>ziJbMPaV*979G2vgV@4t1tG-r0FGWuI};u1)}<5Y%#-vK%9 zt7^xjqTd*1?wgW62$qDu6R6#8w6Ki5bypkj7w%1wVl58Er9iPlad%HB7Tg_*6WpO_ zaR>=eiWJx2PH~3<1%kU5r?`Kg{GM}O!1-_PS!-5uuQij*-ut?)4@vfmOFzDiJipS$ zW68;6TvJ=iC#{va&PFFvRz+Ol5kuH=;EsnT%55>cp-NmI-WN&pt#(OT?sLt8ES2&O zQA!Qz5$)pAD$b&9qCr!ii)gTW=n2Q~9NUHzK8khz$VH>998=G=r>qJR4A&WWhLj-Q z&xSrw)g&9Y_L1DbToG0C?Kui}Y{9$z&jO!ZT=d@;eFXZlIHkbd2?4< zdHDcMqhI$Tl^$(W4qI<7hxi(y8`N{uF}^sNCt-=umX$$c+L<%8XJ;a8u4?IX!h4Um z`14)!1y4Na$jR!%CM7GiiN~*dnC=$F5!F z_&?1g&r8bLDNIeqU|&_pGR3!ko8{l>1=2ZNy{IN`zitJRnQ1xG-xhy7l8HAD3T`9v z%PxHz8%=hdL^}lse%1Y=DYCmO&5}wOy*rpzQ`N?!>1?lyg+mTPM)EJx($iYD!*&se;sJM42hp$77mF( zPkA}4YV)i$@xtux!X(o`PPnesMWJOAT-7_sh!EOxNTlhXzSZW zHgGpt72b5B8}{ubd{=bE57#*Z0004SfQT@BB1J*Qb{79Iq}uWn^^UetjfD}TA+lsG zJMuGU=7$qD*S@&G4&-I#&I344JRZ)mDD2OmE<-yrJG zhGk~srs5nPBU0@o$mDo;M4_AwIn(F)*+X^`8!w|!BNtbIO@7AjUj#WsQLAhq@7<+G 
zkI$kO85fz!XZh#KR(0Rb;A$LBJJ1RW-M+NfPf>&Is}08s(;030bhgwHtnZfCHthRz ztf;nKjxWyztafcC+PvsQGM4*2VC)69z5T5p~0QB4k zo}3XU2wB+M+(BHtZQcY6G!_AL)rfo{8g4<2s~-=^hk<#AaW>;sdqtesx>84|>d_P8 z2T}JvjRr+b>BSAOl7B&3UlB~s@sZX(cUodgf8E%yviiO;qH{tEQ}z=&%eRVgvCWLs z$N68_z`_yKd__7{9D5xA=H8|LXc8}%i1-NNew#trWB_$DO47;P_yL@eyTO?+JG3cg z)0HK3$LBozdF!Wl#mQ=WtN)m1Fdy4!7E;lPl6CK##CfPgr9GK>8PnR+pt8aELCd09 z+h>9-8867T>_uH5`yKPnRr=71{%%ZAY1!#rzUJe1jCpNhye(lOqCY(wJu6>x7;;VA zyob+pDX!uoP-c2m1&iJhU{RoBfM10-XY;Beyua}N`JGn096Ja)BAR}b-%bV&hBPYg zI)Vk*ldR`Y)~NCDT<8 z-UFo?DUqKe7VxNp18e|vI^NC+Y@Qo-Wvxr=;tKQx7lvZ4DV>2GUM2Ul&Pe>^zmb>! zPzqy%C@K_JjLilm!-2LdL^&J*rwmi!k8?_tBz&EQQZ5{NjZ`OTv+U@8k!t9b%ynZY ziPuEgN$mm*s$n0CB5J)GjcI}2vPs&Fu2(u^ec9Y5p5FO3^V-|$LhoXLvM5?H$}Cbs zVt$u!Pd__M78{}0kJaaX4^;MT4OIu@)n>DFH;HhEQapLTMiXo9dEG`~#9ZG=#H-Zb zcIoNhGUl`Z#ql@I&NjobHq6nSJIpvT6d0U2a+t~GkVl4gZ9P&!BXkl14J6Lx8$GHx zA846$^xCynhEwr9etwgIjP*G)H6sdPYP|0`VhnPwA6tby(db)W6XwzRk7S(ViZoKZ-vJaY)L{LF9XvyHH0AyZ`l z9Wz*2@ORe92z|G~j}4>>_trmFu}eF#V}5$Z_v#i`akf&(-}FZfZ%HC&MK^o7C-aW{ z{gDbF!djN8Ic|^pvQ4xln2wK+<@s*U?vj^IogkU{mw@&5UuzbiSzM zyQyi*%J>I`99`d}B<)U8tJ%y;=mDr~ytlAS{k***B+7BsYP??^lTFn3TZ@WyYTPq$ z670#16rVAp%6ijmJk(&{2m!bRjl6nZ*aU|s0WvsvLTR~}t;WhH0o~aV`dYZxc-*gUC{y{$s%yRjUA&dDk}l#>$Y>pD#aZ4L!*O7=FrZuMO+~NibXNz> z5H3i(gqxP?gX{Ys`?MUua}wrAFZz@Dg0gU(B>MB(;CBPdy43j4q(_VwC# z+hiuDteqUR$XCbwVY6_jIvgY7KD;2z5oft!udm8<;{(cu!Hm0~*w`@S>JEjIrC&h6 z*iK~Rgbjwh8o;THRQMF?Vd}Cu+b<*Op9ncak|J)l-?M-8ZciO03xERf6XF;YKW5$T zHf~GS1FAHtYcv?gWo@|eaSXF2)(R`o0~FbvKM=no>!l^!FWAdBV)KazVG}EvQ~ltu z)&fs`Z8EX8hU3<^WgPNX82?7P%9Cy0P%Q`6e$dF@)q5h)HgM;nnV%NP-o|l0R^J0B zd&++roj#)dbBK~cq#COoP5V9sJPlj^w;2Yc`_hYR&S7 z-^ED!o)D`#=M}H_;dhX3fV(?KFeiH(lrWXZx=xM;OCstZH+7~!1@}Hn?&|Ut*QT3b zpQiMNslU%$tr|3zsvTx;U&96$w2Ep7<1Iuc@$eaHFxv%q>`a!|`U48wPpYa>Vw8`w zAJYUGe{cOsq6oWs~qX-=>%<%PO*?lwHfjZ*2krUoIw5L`0a`b`6SGuPj)|8cRY}tGS+| zvt#-!Srt@-hn&ye?J|3{ib+H*IqEHCne-~;%b6Hsrme2csZoDxC#*@RLhT@7m?HaF|u>H$HbpvP4##6w_x<(^g5mfw=$CjN} 
zmj}w!1PLR~vcn7v^T;&sNFc6wJ9k+uaD40f4?bWiib$E`pC<|&WKCE?JsjW5g=|^xAO34 zjJl+e_NbnZLP|d{Tg^S^^H2U?_qvYtnwBdoO@pSy{UEX>ZJn%^@JpY;e!}flU3lAN zB})f8^*1+$dPk>sMGQ&Gma@=9{&X=5XZpf;YQn6uCJxhVyrg=e*Ndoctc}Tm&@DqJ z43bh0O*-MM?Ax`U{(2^|#!zH*a*d0uC`#;h8NFM44qShaFO0|?;!z{O9`Y?l&m$E& zlKAuQv4aYUv#O0!zQ47$*xw4JcY6I<+ids7_ekOhD1i!Brmv2ntnFjqhcyc5a#cFE zXKsea@IE8?zOHEkLN{QCTl;~&25JK^zyf_0-hp>7WZ&NEa7>f#&2`9eQYyN>vEBIu zhiL1PZ0PRNXR)H<;ep2-;Ly6@4i2Y=UKrPBN18gKNlGeS?&8$VEEIJa^3I(~&ilrg9=LcL>xnSB-C*rZ4Af-pXJuW&A{;@H>c zOX|i4hcQ-OqGkeu^rbCUGaMQAp-~D=>Q)>A)0=(9h6tqu8o~^jP3H!B2F8$l5&trO zvciH$F~(NLru%ZP_evC9^tH#}G^M7`56)_>K}4RAzWuR7jxcxDQq-Nc1BD&r5BLD81d z*VvOES~E=SKdl&SBCzinW)o+ zy>B{=Rn&HPh0K>Q6!$c{ZNS_$_th}o1_uk{)E+m9`E_ZPU_1B4Ynsh_-bf+1qR^$o z86qqjdm48Wm{Q#qras~#rb2N(3Cb=L&t{l5W-8ODzIkj>+ayYkNZmm-Qkv_0XhvNG z$xb&ARLQ$$&f?hYur?J6XF(yU*+V=>)ymhSK+YEs7km9WJaNe-W@kEvvrSy1>P2bA zO^xky3=2!Nf7VrAv1S>6d|^Z66C(x29T$`LpDiD3@^wt=UtpbhwM2p9yaqAwuhBcP zH}9-ukqmff4R2Pcd(K%JVDdNMCpK>qQB=TT(!n5Z3fJ+9p*Rm0%Jokt(Vefk{%%20 zmBgh4O;qYQ>p4oeDa)(&8pGly!xYrBN%qBXM|1}hh7XL+WxP24b42i18nsQ9?qkpw z91(hR=MER?%@HxshaF}MDFJy5x709>HeH_K$G>>7XlSJ2Ib1sN;VGRw2Aiko8q~1K z;0HwF++Z7=k@{o6P5SBX-hO!nDk%oA^_`Y398A}?ieK7R61A)oV zsM_}4sBg8Q;W|6H*0I%?>myt`QDpY1-YEl2u?UyhkE6UqZ9#Y-LxX2Z{(x}4FU zD(+cMNdB)hOrHidk%pKXDfR9~+HiUY)DZbTR0#Z^B?O-0g+U3=(f}rEb59{9F_|kokfj=)Z)AaE_l#va)o_)+0_E+1Hb}n$aMMDceVcFJaN?X(H zdLKDY|6)n0e{-*M>d^l~i568(MAtA zkQ;8PRyTazBgBwY;GHJ*(112_!qHjtQ?^N!FG0)!Non|J9MZ&$Y?tZA&fL^_n=DJ6 z#Dv^K(dz1RQ^P+~b}1jM>N7q`U>wlKl2IP+w9KKlcwAsj_$qkcp)S3aoB#SB${=&d zo<;>}3sbPCru{#K~kTK%5$}>qf&phk06WH>gU6ry59Tx zPo|hcdY#vr)EMMJnNt_uAN6AnodEdCi|^z4-k%pFx{e(sb;u|-rIt7+GKNM^hT zU-CKHDr<@VLs_pjn{#S<6?XPZx5?-;NIgt{$5SZTmtib$bylCI6XXTqo z8&|Cg*d>a)$m4KFcoG4#IxJ3^;`-tGK_0iF;DFCvZ$U=Do|n?`6)Yc|>mktIF60Z@+bY>M72QRk38Lm-jyuytr*D(tJ^YKkysl=c&O-a8BV% z`SHBM5^GkiA{h^(G0TIF1H1|Sd(HEpVUnDQE9*)db!ZdJWgoH488zj>) ze=)^w7Z2i1*h~FxFq_-{VZMKBL-!%Kp#J*3{of&#A|`f)e>*w*#mqg=g_N!>S1Lto 
z6!B%hMOIYW10cj&)!}lSP=OB~4yR)U*RNEko$Ixg-uCKMD(g!b2>cy_g2^T;n*sjW zZ9GAlW25%f5$R>Gvr{Z3KtqXa`^&W){G#S+%=jb-xC6n?iK*HWYENm^iBIn2oapVqXjIQ$Fvfu}>+~oJQt)c{=Rdcz|1A?kr9N9o=4|l0Tj!LoXhm|3cXAY5! zeJ?3n2orY~?;;bOLm5|9?camU(T#8Z&yO@?G0Y3~84}9kjO1oE!i@9A22=ho7%>SMjjrQO8)n!H+AC2gwhQ`Pw1L>+aJz-;` zcM{79WvBn4$fe9NOEyq*EQV{&xy+OK`e!RRX63c0ZoSX144D^-NZr$R(2x5)Fetmf zY$H1ON+tAtfMiWxB$Izkxr4hTd#+`E$SRdlpl5Z5yke}ghj7*-bh~0qNoa4HWGJ^x zUtfg}o(|3#b1f(hp*8O8Q%i8PdtZq=P`B@5HiE}So4h~^93r^Ev5RA$lwc-Y#bUI~ zINPQ*QRzD_!glf0>Y?;Nx3V?EiLZ8V`5~~iB_gOd4VTjkbh>ROJLxFMM1Y$DuQ4>8 z<3MX`jXO)sy3cYB?dQe(rNlZTZ=<tJism z>DlZ!u&J&6XS#!n5T*kATGEc*_w*hva{duq;V{6&X0nD+&>N1o6%BAwh{_ghiW zlD=y>fD6tXIjo#)16nRZH*xiC9B`;ui8Q2F_P@DPQ3k^Zab-2G8+lP*^DgzKH*>;z z3zIZ83*o=g*5y90GS`53-;aWz)Ag&f)#%7E%(1%*2h|9aK{iqs0{Ajweca^G9Jdi* zoFE&|&0h-A%!~m2Vzz^*NbVmO_lp)KXd}M-aUpKlE3!EKqyWmY{aA;BevYx*=p+Uy zINa9^k_l*%o0(Y3P4KNZ(T>P#ajlC~0^^J+`y&%g{lA6mmgMsD*yp8m%+4mTh9hFjC7TL;Gntfib zS$P2xV;`E9IC_5QP^#&Aj9I58mH%?yIv)NUcQQKN`;nM)YsqLsOHC$r2?B!8^foXR z0rbyAChkN^9s^tc%|!sUZrULrw1Kx}KZ{VYjuZZ^x9JSK*e?T&;FK=PU>_QLA}e_4 zQ`DM;<|G!A4S-rf%=}IEWF=z4n$4q9X`0xjv*yOwK$*sskV07Er!pFq`AMURX%iZi zU`;(J$vwDfx+CrP)+8NqRKI(WlCZ4jOtazJmaaHcT#h#yjjH$DW5dCmqoVFOh~!B3 z`yD6Di!eY5xoH0H6?E=mQX4}O6WJiTkEG@iIYT^I98@l^pRam+XXbosx5~bHHK^&L zaN+kS)c0dEC4qD@ExIJU8pR8%0t3Pfm4j^R2eo0O3>YC|ba6~J^6vktC96LO@8b?W z3ix+=2FiO&6KI%v=_8daz6?;dvmn;t&a4Ju}Wh>;g@&4;!N$}2g}_%>si zO>7NpJpx7?svUeOXXTF^wGGt4i=J z2%?qVEze+up;OMq$Xa{#Q8A~!?FvN6*(3}J>!fzg4joEws*2ba@NNI59jV|emLKOn zSX?Msrpct1`Hy+Xn!e0YV!t=zQ<`wfCC`vX6yLJ5FcaOUYFGAb$5jQ=#l)U(B(kvn zpJYT>!Zi7$!*AaBY{TTfKi{$qw9T9d{&_N71bvZlqnIiu>^S zwX*QX7Ok9M(EAVpzZ z+L6L5H_#~@&$iC#yWTok5yECS_%3HfLLGCnnqjsBlbuRga+7FU3`!^}Y#G^eB*L7) z%vSO2Oqw1`IDZ<-aK;ne`V)qO7>#Y05%{Tl%~iuYWCXWcUaZb zjyDPVdR~l%<=@h|7D65BK6UNF%H7F*BFvYJve>m@v3S4Ca_7|C`a`T#>oRI$4T{vB z7z4y2(uHKM#r7XT8v|d6xxAYCr@tPAb-c( z*sz1SiN0xGC@zq*%%QL#M@+hJm(ch&%EiV&M9Lz7o^csbT1bi@Qn6t03Ndz-%rnkT z{!k;aRq1ixw{F^CnGe-13`mZ!ASJI%p=;)IZtmCqFa{Z|IfP(3;^+!TiDOMHpKPlM 
zwtG5SlC9|WIkPFqdw`H$*fWW>!y|V2FA1$|(|w$Ib-EYBKu}6*TN1sH*ojVX&4ESQ zpJM(QPJp=noh(q<+A3Fjp*Ke|2zrVIYSmLxh$*dZ8he8#cqUN#8LgLO^MuoAlU4bI zO^FIUOjXNpyJ!qrCG7oStzl5_e)qiGs({fg6T!J~n%9s^i>8R7P zGFw!so!v@yobwCd8eAth&=3+90AyhX>Isx)kBCoPZQG{(T+qeZViCr)NesA!a90Pp zwJaAZMEXF=lCx86VWbs>g4wvn+RDsqASgaQkuf#9^a?6~k(`YY_Ff59={&l&DZu;& z8TiD4_<~2)A>kb7{!1x^jHG3HCnmn>@>TCT`AOoR-G0(gvC+s5t>0uE2g9Dc%(s@c+RIvO5fK)!?&#_jLCoUGJTNj>+&LJzFYqg&V zRtP@$xP*IuiM-(9w7MMu;JFAkK1fx)>fQbIn-_mE8o+e1fk+CE(g~;9Ycg4LXkz=`k z&;b_S?9%(o70Zy$av&>iq*Mf12YT-3CP<(qU58P@dDMK&e<%|6dR6R0FYLvCe{!YV zDI6tb#fe5gL?+D}m7K@VSv?Ju`X_fBrsjvX&)g{-t~taxOU*pP7J`XHOCa#MX$i+? zn1ZIX8Qr6@!`l|P;6=bN*y>t$`#hz?L-31PiQY;%Q7r0{R7Cb!pn*C5!)(w}&l8iY zA4R<^3(Z8u=`6Owt04Sd*>k^z%_OZux;sbDU?L%UOtRcqB=_9_c{vX9^8Zkl6t{CP zyd}kp=vobsD_MK0gJS&uP~f?(+N2~H4^mhMa&(V%lh2jUqa#m@JH(you8Z8yqd&j= z{De$RD99Onu;{YxyX7!@E}#*=8s$OWMs}w<7z(d~>3Z3wLq-Z;AaQ>n~HdllOw+T|ps_0&P-Q%|?2}{y|?P+0w(2boo+Bc1l%)@To6P zEViL&O|R(j_SaN$4b<$lU3)=;7my!`-~4LS?T1s{Lh=h!u46BCNTZCuNBRwlN-eDf z&2?V>uk|D0h*m6_#8uWG5PLGodOSN-RF`|uKu!Ck_4uyzmJGUsmAH~YRd!@mIVt^O zrNR~PhhxC29bdGs%qv=VCPoDKCm-vYQ0GcqUUIblKNKSM)q*3BBApVCx%syW*t{ga z?bFiM)z*?kWjB7Qhsu2;S$0D_Oss^Sosw;`g^z%3n(0}6L*sMHOWR$6qKDKLgv*!B ztXc7nKfYv4(Nc3oS}m&0okBhqP4D(8)M8&w=;q{HxAyG!;{13SF3%j06PX76i<_KR zS{uK*HzLMx7IID!=+*nU*JjngeV!1|XaeWD&?_8R2xxv8mojR+$!KHYu6CQN3*ao< zX=)4+u=r51AbL8(1L@~VsrsX#bXQf>(9Bch_bzj^b`QkSc6_=fW5_QUJv;t2M?%P* za81(YS2W*lpXSU#Uo#8-=?EC263+Qzf$S$u$6cs>np*x-!=>lA4R}6sjN@do#`p~4 z6`$1AdM=M#6%fo*C;s;LN}|`toq7W)Hs58+j2(7W(cd96Q&*-WGfWyb1b$bqIWLpV z-ZU;AWmBy$gmVNs(=k`9XptIRFj?n1`}2bve>Rkjah4ygdir37rh%u!&+VUbe=f5W zRqVEIMRVf!)BlI^`f*J7<%eI@--EpSH)-qg-o8;-#+Dbsh-cV)qlW>)mQzaJ&%;N{ z)12>^Mx2tyZ4~4VOHT&INiMT>*4BP#4egmYRagwr?bd)W8+40CLFr2h5+e_l6GdSq zTJ^fTFS`YYCw~PMXZ5JU6B+bynP*`{c@#Hj_G-_=^!UN{dL>4Am7@XbE1cDYzbdp2 zac;++Gf|T(-%rBkGi2q4{EyII zr*tfe>D3+?TvOk;yyipf#PryI;cqHQuj*ui3{|;&F8#1romRMAPxOz1gKom7CTOvw znPsSzCGk1W^Lded)q}IDsHozeA&`RwQJ}N!ogjbuTg0xDg>G+$f2*TEGDv4s)^2H9 
z`#Egi$*@!FH%#aRJ+{g?JG}Ia0vYKY_bkK??w7Fj%gA6? zvobh4>;E4LIgav6kfqRw{8@6;#4N3U%s|nUX?<{novr8INW3F zNP+VLN_G!wWUO7wODEdCaRjb2vU{(|0VlmtaY)p^3pjCSJeXPNG}awD?&&V)xGXp72kO4ak>u7rJl@Phhz{jh+@-P?3}YK6zFnk4S~|H z?FUsuA|KZORN>=ed)aQx&0gH)zr(v*pzmw#$E$GqSoIze5TykT(h+VFJv7HqO2_V3 zR;L<=GuISZQMG*SuHRD-7RvJ+UF52_iQ{A!H%%b|wceGWzdFPuS$vFBXaW}LS6`ka zeS|daHPG7k-FJtk9*I;Px+kI$oW&JybHYQ)OjW!Rx@O^7h2>IILPKl{e${L}<^uoN%tMN5Goua>z<*>5?0Tx1;>bSk+g?3icb0u3^h$ zDUbLtLeCgcoYq%dBA}pH`dl#SZxJa>ll8ZnW=Z)D!H7x8k`ER|oGc0hCvkUPw$BAh zp3ixUt@gRH8A8A%!ve7*l&!41_b0T!A2M3oB@nD$QdlerZ$lMj*m_~vw=8{=VzL>( zi&elS(aVe&uPkF#f{yr(M6 z`lk|Q#WpF~n%Z*>vgc+PPZ;=s4EM@W0UG5jUQmm3&`k-{ZnPFcZ3=?-XWP%?5!zjS zg#qM=K>IR8arMgfY(RBE&~)-uQW%tPcBC1@wSVdBm+ecP{`dflPjPr+$p%RWsuOB= z;9cTe6xo*O2EvE(CNh1(LN>CvxU^eg90bES=iAmu{#$I8V11F(@i!}Hl|@y>yDlw7 zh-j*#skSGDqH$U@Rm}!<^Je~nL!xk`EGK|S4^$uQ*pu`jaGr^Y;%o23xB%F-%m57>Nxx}yOW2{v)hiE} z`S70JMgiyTwIe-aB%2cxrwCOUQ&yt&Omxp#9;Cu#N&y|r9IZhX8Bm++pejVFqJ*n; zvdM17N7J2K0bbwLDT(QUv+HCxsq8hLN%(8mke@dCX_b*y&Bf2{=T|qg>YoI+YStqVPpxU)l~^(<8z6#)Y>c@ap7R|!t!JQ$QbrQ zp@nbf!WV|DQ&UHCr^nlv$9ZgqLQ4v&tFDmbZ95zdnmTSHrcg1Zps(ZuwivBroGGO@ zWmpctJt+Z~&j5A(61{$3-JA(oSrGpF*H1@(`rIo@zE~YBIyae>R1AD99e1}fT zmg4L!HsPk4?MLAW@AV+FUe`;d=xmpMO>G*2z=Uz))(z{L;H0zUOb?cyChZzpt&bIu+ z?!>g#cHs$OTk1rqtIglWjiyTCvNQRI$2@treB-`=-~6PNXET+zLaon?eD{uJG`5NB zZFe(&?IKP{!k^j;(wgoS{Hh{<**h*l62Uv{jPAoMfW$`1=8jTbUa~Hgn;4#$3nzPD z!k|-&wvSlbFXu6sX9}}!!soFax4~9a*#2G{%4CPIWJ4yZks!z|z?yo7^!G@rM6o#> z3v!q|rl60EPsfK!Pv9*s4_}C>K!A5)&67bq>(oL0g5Th6JB=JMId2M4#Sqn*+~BPJ zpo+m=n-XyeW;jCKQ!BlP7L$w1y_hU^p1n5UXI5^jSzmvnCs(LWtfSA5$kEU3iT7_s zWra{^8OMVw(Jz+iDUZq@wHZImlLa;ud($!mo)JwLI4iDYIdgusmY3@SEB5+4V$j;{#qP}dX6U1;?8vd3 zFhp6`*GGhbJ_8IvEH`Fu+UGLkEqrw2tgQk^qe?I&Z=iO#jF`A*3RXHPNg)0D2oFQr=I3Dc_T;8jaaaYUkFp7bQ zJRE&lML$k6v1o+E6`H3VWCK?X(@=COVgtht7sVGN(rS$4csgtucS5k(4HoS`wDL); zuNRA4R;y`5c>%wABF)IO7aD2)28|$XA`l)DMRVGBF)c9sWPPkuUf*(iW(dLY zdnL_LkH3!8RZ*N>l4#n{kLpTIMzjXMtCR+bP;jHdD 
zVWV4P|6bwg#V9sMqNxCQr9h&AL82**JVwz}Ao+h^>Vk?GWB{7LsEzP24iXKbGCw3jGZFr{3KN}5h-#h+QGsxKUjm$<5-I*_RT z;Y{&^{?+rcgk#fpN4<$uY6dxkx5&+m{}u0cLE>~pq2gmFUMnfukxDe3i3t9Rb+52- z+mbO$3U>0sty{%O72JN`n891+xe7dKLldHLc4ds566;*WYMiXm+7}IY1*xX&8qWVZ z`NaQY>++hx$B22LE{NBw6YwDPLmFur%S`F2nOTeaI|N+IFAgqBacL4a#q_Eu4xlUj za0g27Xn$wBL5tTNzKCMaY^a(i?b$IbiY$dwroHT({M~&4alHRsoPDIPlf92QX;&F~ z6Oy6)P{xiDf4bWAVuH38DBW%oG6y?MH>Cw_M-`;*E_D{5-^09a5!_sSCbjDJw)l+nb<8?<(-&~+O>H4gC%`q@13Ewheo>SI_rsIHLEh#I^ z3hD2QhL1FahqnL#G4rgFG_i6*`^G32_^^Y%CnK|rb4e09;YH{_iL3umz@{BF%7*Yu z=y71`-^tP&rO+b{Zk|h-YA#_rJs4cJN;?m>1{gmC>YgUWj?;;@KAC;usU&{wLt|)mafi+_;b{gy~z2CRitmw3r&{)dzB@IX+V1@sounl61 zTptIvY7(20LROd66~TzhO=oZp+JQUU%|9ID9}@|!FyZDBqtzV~aAyMxV%v}LLAEny zc9)&)G7jL})N!vAIq5f%O?H*X?%FvYd({>^{9Ti-wE^kQWIjlNUfYGqObnsC`RGo4 zIhtuyZ}@(dX(Zb@4UtYj5JcL)S@?{HQSyrAJ36b@LuDtjoSpX%_m)-Dnk#E=@mTR# z#=J*NOu*voySU^lM3RK*+w_P$z8%f34lcoZWuyS|cG_X+AC7EE!Bc`5zS@9 zs=a<%kWJSMfslU+@lFPNd?H^OSFDC&OQKQyfq(vCiW|s6sL;|!02h~Dx?wFU+TZ>W zzTPGimYtvHo8{c~V3n<18k%09zSX08ep+6PZ@+q-$}2a}nJ=o1W59w^xv9K`nX_RY zt7i{|VhEeS?Cr^wy%i=*LRkyX zY8AQ!)8b6~E{VD5dX5V!Rrf~22l@u{o74owDiUgwK2j=<51u)%^bTWDFA=-|Y$tk$&6c4&%YfGdMTla=i2ajC$&Y+i5G-0qTs zY8_r{trCP_gNLGU!tVmk3w`F}%rIh$kMo$D6%ODdUM>CS|HN~O5yoCYHt+2HsYAeZ z)p?0sz&u$qT$jlPRoUNOrFic*09)$VEKK|4XN68+)%Mj73$Ek*iz$c)Y@5%_Maay@ z#E+z?iawaxwuteYbz0Sx4m{%w^tx?k`di9m%>xJiE$avtMM_(}n)2QQ7~a z0N3A!Wiu4o#yTUwdhkaZa?qap~%jy(07IqNm?(4gkJ~J@?A;NC3%%O11*2>mhNw^!m5xQRO{0sI z=>Tf1RLk$6k8knb%hmn#Caf7@ZltE;=7~K5lTmTyI2<1jnIv_A{%F|z0lDQ9RsJib zcH-b~1!mF4Zy~B3ImDmljGc}RN-#4=;w6~celT}Lu5qgNdVMM#OfW109Y}Miz*4gYDjSGcnid#m_#f0N;dCJN-|I1Il^VG_-Najki$F!FW z(hAhjRpj_cO62HE%iN<0*DcfjNSKbU-OrlBK^bx?_JemO-;NG8Fm_Vo?SgLA8|J1r zyC^8M?cWIh!vPU@&m^3z6}j#<+7DUT+xuZ@&nKd20&{g0SN?rHt56|siCP2`{}iAP zBkhYUyu_jILH-DgaVaz_#Q%=WfEV&_2`3SDAH%FNg!P(Q$6PnYbxQ{@c(|0{PCghX zgSo-~p-c$GwGFqk;Rrj(YSIKC-*q`F5G3kX6@&dDS}<0*^FjCR0@5?PuW^m027P_! 
zvj1Cl3H8hOvk|Y!lWVL(j_za)>_u3l%>S{Vc(9PpYc^c+{P?B(Yq?6{C@!r`GX5}j zTgASJqe7nzje#>+c?bNPdd3a?o_>PfN z;4yvm+kvQ+WQpmVV8 z$r(`}4CSRdw?6&~IDXh4t zbz!4DDeW{0pliHX3J?g~t)Iz!GCV(L67Twf#}CF|t`xx?`;Bq1&kbfG9W9Bwt`f6- zt7DeYsuYIdRk!C8ZA}7wjRdILazyVI``we8nQKAY!>l-S2U|!oQ7o>l*o<(vny-F1+-^4kU zznFX>g9YI%SD_hvxgRUsa^ZX6GoER@)3ycuHEOxY!DyLR4g54Ju52+ra6jK3_6!S7 zq7dg8{pN>uw-T$VPF$$^X=a5RHlq?sWXFjgLe77+Dfhlq#5=x}&Oe8krrt!Ye=8JJj*O~G& z1IfH{LN`7c?qH~KJLv9j>iz?$1WzeHV~H)tyoi7Esi5|Q)W=K<#LxDvyXLO{0F6bT zeakRoa;c;b+uQj|6SDiIZX3~fs+XixMa_m+-A{f$9 zwAAeR@{>FhYy9pTr5wNek#|Tnag0|=PW-_|4C}Qz-o4Fzk-urjNij+jsY+j-EM!zkxn@l(=D&G^^F9$ktN8Gjr=@Udx&JG&%L0BmG+!0hl zkCRx)D8l>zs(eAq20@DHaK#?{rwPZai2d^PW54MGg<386v%T^Zo|CTi01aXQe)vk_ zebH6=YM=GQdYTvgu76>Ary0@+u!W${xXF@da}l~qbs z@GyAL`CJ9E_#Fh;JC&0}26FT;GbRLyyi+VAudiTVy&0RYx(O*mtAosasNWz`G{v|X z^fF9M;-W0CtF1T)C#X;RhK)v8e>s{Uboxvz$?LzKw_X;~)jS?J_IX#v2ofnvrTIgR zaTs+rY?xqt|0yI0%%JZNottZ~HTN5h|0=JJ)jBL{^#N$D>}nqeeZA5%|F z7DvVxEN9t8vOLEhwEgG90BsA_;!3FM&U>O>c4F6$UJyXHt1Vae3f&@hG(KlY&)B-` zx<(a28(5;>v!=B;qdV+Z;m~m%X~DC)ys^S)qmNRhcpzOsg#K-BcY(}il3s3>R!z0X zSBHml3DW5$FRZOk1nUjtxfVS!_k3JW(q7S(k>*!b%|`kkK+NNShO1#$?6+D_E5Vp` zN710Gctq~$@8i40gC&^$#^#yUA_aYBRFBaQ*G?ggnd!E=1q{ud!4D_-D-(7VesZUdVsI~S#W%f zkmQP{zyf%@LgiVM*j!Q)+oJDPi_RCzoHQHjoFO3~Gh*N7$SrtSN-|t}kjx%-`%DYj zX0oc%CXM+^Gn|IC_ef0h?*fQJYJFkmY9l>=ya&0beMfZ*P+C3#jP*+=E|UC;OW$FpemrX+Y z`31RrV&vn}QaFiGR1X@mIF!F}dCHE!H^W)4t<|GCpl%hI8eZ{)vwHup&Cv_D*Nu;W zOtHY%WRBd#eo>1o?8hHZhJ72t^EB<}uz*v2qh#rV->OA&xl{jv zm<2K6j6dz!!|UkE@mHMFW3!K6YZ}OG*=mj|Xd(1eT{G0UCSl`dOr|2NFID$3k#8a+ z5NN{2dn~P)@Bdh`$o*1lnL=B-#)kqEN;k6x-^_yzGb5d_btT8mW?n~<2KL}nS0n$0 zk?;OXDUPP?iV^oIU(!R_J0jbN$(pvW&Eg0%jD)woF|x`5lmt0 zx7SYj+jCxiC7;AvV+ig8JCV*v1{qu)qn^U_2UAnqRWEg?%rz*47(Qyn(7b<}$};TK z=XD_+!Rh*33QUXH^f=nom1wsFIwFoM^(Ikjuk=K(U&Ol-0Fli#@Kq8Hr!Y}WimWV? 
zX<&(|%i$m;7R7O-8XB1{F-98)zfFz^e2qqYhkdf~>peAve^0A7_04wP6@(MIg4SN9 z6i~ma=4yL}i=uwiHxAMQ5#NADq(LJx$W@!n?PG&3kCSS19rP8kiI+I)_ z$%Ob@vxeRG`vBE(7EWzu0%b|5j;v~URH+)yIXh2HqWBsBKqfsMzV`~1K%vvr9)491 z;KIaHKicyYUU+7y_eB?%|F9i!cIk}O?au!{Qe&0gfiRvKQfo?BtF z3Q0Zqto~ci@d@hZ02I}#e+#SXN3L>kIyt}I+d3+>y}YI06H=0LYLfotCW!3&bZs3i zlL~}33y~*90~53O(hh1sr40^Q17Hg8&_;jq{fK5WD(p0NZH;6j znWlBoS#3Z~KK7Y#`hl(~6@@=a{2EgK9sf+?Lf=PEsBdn$%X8-2M#N@uCoMt9*E*6V z^V>wg>X+n0E4AEn?}K{>I%#z-V>KNDKL)b@0JbI+l5ZLYa5N>&3}qoyI3n6RSKe38 zo^N&yCvO9tVi{9efw#=rxE>F&N=CBbUBwqR&~98tCWIn+i{l0`*Yo)P51o$dm)XN7U{o$!yAO;2cHBiWd%G z$3+dpv|XZ~ucY*asA<>_^$i!9iXu>qP9u}s4BxV3NE6Qhxny=Ls7Q_XT^OMy+%R9B zTqSYDwk+s@nONk)FXzX{-f}z-Zo<49T{_y|)y1`w1AJT7Voq%5-kskn{ zLDWf+e(d0(foli<6ZugR=rajCnP^az{FUHq7%40MC}dWhb`j`jC>T1nVwP(jX^)k& za)AwB%+ZAIeKl{UV``uOZj*^;06aS^jF!OAAh3z0%v)&>24W&$q;-4!*Of~nA&EGg z-k}r`tN#Fn91m2>X7ZEG{{R*Lqny9_|EZi$J2&1pY{Cy0)MRg?(m5lVa> zS+6$XPt(s;wtXXHYx%-n9p7IlcH$KNFpp3B5pTX_l5m#jTEQBsaUaq?z^$$)&nJ zXknUU+r?w!zS7nCp6s=neX;R-QdL_PUg6M?5D6XHhzwVjg4e_!rF1cCwh}A>KhgJ)5xMn)q^? 
z7qd^Me+rH~TR37OM}(7;KV|$A*<>Gu5H7{nz?!%sc`2btnF&8!0(|EW_@T6wE z4qyGr{tK1kYh9S!!diRNOgPA8rJ{e=$F2c;Td1#TY@xVP6$$;q#S^A5mc&7cxIab> zQ*$0Nyw3}3T3x1$Y(!ovSiB#y8S_bWE_`8pxLA7Y;9g52p4u)qf)Qu>+E!kSVFH#B zw1g)f)he_wnT112{r$RomFv%Bsnpr+*V&#nU#V+H=XnKIzaWV1n@pr{HJ)*^K(Tu- z`xvQL`TYqjc~xz&@qhxY=>^me|d4z{389$6F#)6T4 zR8Lb5L>wHmBp0UPbTjl}cyrMLK(VJ`hJ_(WIJL(ZE%U65|4a~lXF0R^p%IbRSUsDv z6_Te`Q7OV5$c-nV-OyFJBLcGhITy958?bR@!tgmyKgyss^~2rEqVF1k&=32ymi!IY z(axD#7M%n@d%rJF!1mpK)W_TjNu~ti)q6P1UT8eJ!ytmf`+Tmd&*lz{iuhauRcnf` zRg57ABmC@uY$OXO%sI=ML2r+2PN>x+>o!sq~yQevl){9ysf`$*RI3 zHSD?%=15XuRm|pV;{>+YmI+9aG6c| z-YxGz2RY{&WK9@&^tPU#r0MOG-FQV;_LCPS=K`+jrlFfV4#QDPN&H+!tSrx zSJRfR2)KVmjeL1g?f^P_#dUfU0+?=!2gzAOW%t@UX@V`AXvYDMKl(1>F z_%Ij&k9GD6VbRd#Qqaet-OqnW^=`M+EaryUbARzdES!6_Mw@c!oi>(~_gE|CMh@{dGg@_f3_aP=m7bkWnOg+~9qaD$mWKY@t$Q%Wq3ERO zSw=lmiRICz+MV2=2bwl(^v2B8CY!Uq=*Q?W+71*Qt3u$i7pDu^fG`c zYv6ML9pYpKwAx|MJ@QI8C=!J)H)n(~jUIuzo9RsO!`FqpPeguKF}k~T6?j)^{^4rq zF3H#}63HvrCC2vyaWMf5b1T1*i)JGO$d#`g?&Ef6OUzrRjLMGEX>9_fZ!-b_i{qLHBD@JJ~ zU{~qX@2M}p5AX1in16hf)Gj?GM`Rk$&&mBQw+hU$WayE~g79-w6%p>bS-Ji9hsBGX z=f+HMIUBnuWPJuUyS~#z!b(e=D32tnjowATZ|kd)cn{eCu6uqQ99Dv-s$E~*m}Ux0 zb7qD3Qha_3Wtw}k1;|m~+k3xOV>FMM7<4T&wx29`q~)$(j% zWDcc2da7HnB`eg!=~iDrP7JO~4t$NxBSF~PC;kkV8DuHv4$jm8ujo~HsVm6qLRyXi ze)f7=K9Kc??D{c5vVy!DX-x3623v z?I*Kog0VU##RuH_q7(^Y6zfD9&ECLBL~LXv71UsD(` z3-9`tj|}*x*An_7vMl{2gYMx8rbBTJNHF_;Zs?E14}X|ys<#E4cZA@lgn;_H*Ti=( z6#@}w=Kq}k8PMm@7{+g4)CtSw_X@dN8w+bMfl0QXnvOS3)&{S~1uX|*eZM^SZ`z)H zmb?5?_i=e8oyJK4t3Xd?L$%7qM@qSsKK66Rj>dvWaFCbuTP;QfUvy@oehul4wxy>_XeJY4r2@Y)Y3cU%T0qH!=BMXu2up$ zB6*uae{U^&e1ARmptGMZgJ;?>^Lf}OuTUMp3rGIam3-Gj-UJBYb)mC`b z;+By=O)olLE8o(OpU#BU?c~E;+7G=1h#2wi}qDJ`s5*UXw_2{TPi@mLYjqshH zuHwj8cw^-WKFp>ld#fvX*$^gtfzCE_9cA$2^^Oan)$&;hI%(mz@J9b7GMxKZ*1f<^ z149i_=~G0Okl-+1MFLmv3e~G>G0%(=nvrJ7{{a}JN&e9>W7&MINd50x@G7G>-oDGP zdmi?X_?rS3ZMa>xxUK zSZ07BUnd4bCWvod)(xLlg@qpfB$2?p44K)Y{dMd3g>HqmI>O3^+~Sk>3N>!$n}5>Z znJVuK;uiWIP8W9nM=FnBhxvbiwE2}wWYQ7M+nKZflSM0%S{EYse@hVcfBiC5aAK}) 
z)>*kU~i&y;QXeM4Y-M$#gc+Y;S+dn%+!%YR%>JCx?2^7$+|gc0UJP)Eh!h17xvh2B58`YsNCi$|<=zDme)?K)BOB}Ji}ye| zThU8B&od^F%l*d)moYi2z?FIr98f-I?>%@xxoQFJ6>1Y!X_$`5c}QpKSx@rzBdH~p zBlo}V=S!iiffd{)cn-T!sv%Wj0bJWCNo>IchONTtRX&;xci8j z&^M(|bbwdR)|lq`3`OdkzalGh;8%!*o(COIh(OL6R##&_SO{DmGn5X}#7WBrqvVV@pX`1P}f^T!*|(hJZ-S{9nREOqo;!v#RgZjf(^Qrl zIgjq?T^b6;M#`GVT)YDt%=Jj%=ypC+bD|MFlS`%+5<-l9klw)*?qyqm%7P?q2o@n@ zKw_{tv5qb+7C$j5FGt-g7m-@lxVH0XO0WWM!&$yY6W-rSf`>!cX$-)16+6v;d8x4< zU!L5DHh8u%+LxfF1{_M?J)+LN2>9ajeXMN)VgTA?=c7)*r4X0H3j|1&)epTSCFa+J zKZcvCpR0rk<-ShoFI-BPL$4@B-?J0T?ZNen$TR9rzc0yw8D_-hlS%l~<*HA1oP!tK zRn;V$q9$eRl>>VL$d2j6f1&QrHVKK)r`&KuIj)#EnRx2Vw&n{_U0f8H!v8ZBcW#zf=KXo z7ajjKI9T|1Xmzlmsso{(*?IuKt*LqvZB7W6!^idbK|xqut8wLlsPUbi3ebbMz_d-m z|7C3avQr@=ZySUp<}PL$ovUJw_7|1nVAN2Dv%5`ZsoZ|CL3Uww`7Bn@Ft-e@uqty!F#Eyf$&>4v#*c?4OB0?0}fFWnJ(V4)YOjTXPNkwJ57$ zC{u1pt2~tnz%%U5jf`S)P?PWN<7e-d38tCi|7AX!j)N@yn+A|QKeFWgc)=?qdC0@7 z8ZX^)sb4>h{#YDV1gglY#s?&)cl)yW0%}#b;xd8_OLFL^^8TtU*L}uqv%3C(=#+kI zyT%p#myTb_zh0s_MB}NAV6GC_aCrbOTkh`iIDv{oa{U^GrqXVS04Vzn{+_l*5r{bg zCD_iqg^<1X)|;$vewNXEFRBuhh!9F0wA`<^ae*9-QM*F$B5A=W= zVKCL6JQS}s&u^b8`{}EtyJD|gV^&q!k#%Lz9BZCYQ^J^y`huWGoVYF@)h z#6y8jKNYrdRJ$PrR?(Jxv+nOUT1CfECq8nuWFgXLaLK5V=0;gx7MlHT(=GGlcBF>1 zfpr0)U{9Fb$gim#lbS%$9mqz0Q?F;H=s)2SN&lN(@$GBIqQ+@w{z!RND=Fhw+3$9q zjSln}Ub`>YpG(b(V^Wq>NgBe4S&3J^?Q<6#r3J+I#5An7@BY*H1W!$cB5F?`HC4as zW%rNM`M0g^(plv~m3Na)K;EoZazB~55@5Q_1KWKJsa}%*_}2MR(sRQ-<+(X`iI9Bq z2mZU{mV45`%hwjhV4a2e5K#tp*?rFGyR3)!Sj{f3*rO+GFM`;WbBgtw1g7MT*AB!{ zKG%p)qv_3R?a~_hsr8yZlY;aUa`ssx*h?n#cfv2@uVE@pW~&V%S4Rz$^?RBT&lOy$ zNW%W&u~<$9a(7rLqSoBnM-|ZDP)x{V1#SLhu8w*7)OZ&SL1eAcM{EH#I$tny)9NOg zUh7|Qdmj5{As30B`X`FQ{!t+bH!2~AYPopxJk74#u?6RMRC#N@qrh|(Z4S2012O*C zKC_F`>HVbxEfloRSZ@cS&_1X&D)aRx2cawg&CB9qXRw^vDBbTVdgiDH6Gb5AujC!L*2&oIb=Efom$?gT z#{D}8oCW{@uE@e7lE&7*l%dV2uWX`iDxSRXcuom?^H!rHa{kHLttT0X( zD#G`Hu}jK*DICgLtpir`l53S0$CFtx5d99&IFF`bFLjZPh_Q#7)ql&mJX1S8toAgW zF~q7KF3VO#S)LL&nd^H^@wvZn7F8UxvLXa~KbXLH(Q-#?E6F9-Czvdn*L>P7&dn_i 
zJkUXMvR&4g2t|qDeU2Y7(xhj473!soMJN|Gg^apjuIo;_iAXLyr+`rm)e{ z?Awq`_K)`DrMD=}^Uypsa_cg``R%BIhG3kk z(7%XPNfe$AB&Ai7nM}kokn9iSe#jy5;}aTYVIOxe-oRD61`UmGg*Hp%o|(UWWk7;! zw%6Jol}s7I?l-jseZ zbXRoIjPv$B5hw{5r!8 z8eg5}P$w;Fz?FB251kdI);sk@OpK-Cm@XC)6M=Y{gaP?gw=nNw0~;Hrqw=EDObJ3_ z&J|(+)8SE&>Q+k+>-&gG0uDk49KoXj$M4O}x&XTz58bnT0xJs9DtDFclwFYom!(XB zfIH47h6{!Q>$u;LI7MSE@`k(7!2bZR33pKv(r0nKu3(Kfr53Iy0G_`}5BPge^# z-<#)r;6U6Eqx}*C?qJKCDUFGr`E=A*}{qQ0f;pbHq#8GGo2({)bwvr5x5_CKuRW4Zy6S-QjXIsK2W|3rl}PoW~?faY;G@} zu3VtEC}yI1-)X5W@;DXUIw~xL9x#Xh;V!3;ThzgQGDz>6;elSj+o)Vq#8_BcWO}_P zD|;*My=J+LT`_S=t|ZOu&9b*uH&fG*KIJ5E09s2-G{dEzArki*jg`T;+&SvUx})0P zQ=N9>$-Ts$;jy`U0`qSJO7ZO~r3W29cHnJb2eTESWzw3+DwzSPxcM?|-+TrRm}qm6 z>xWdSepOZo*;m9*=DCU|Lb;=IKXFDq0=-+NlFmMsTPFWuY5r;wDxt$KE#pW00&2!9 zS@uEC4Wm&8n>4#N#FZbHxe`%#%s^Q3QXm@a4{<}b5xr}8X=;~o0cf|(R)?&&&%1d#dAn+v&cpjvTV7bG zu5q^dO2CC=JlpZp*R?)4+2X%p>~HOz96@nOybIzJX)ox&wtcJyt-j)}8} zk8_iRA^%XqDUh>m@sBxHgdx5xVqzGEiBy-`H!NH*p~Xno)m9zc;_14L{(h-2vTS=`T&&1c=nZ1-X2Y3oaS zw;rzdQ?T{22>q9ZG&UhoB7Zyy zF)1XDKZadw%31`fjkkV4o7AD152&R9;~+8nNM#@wS;t%nA;X#<-MH9LqVmQd^qk^b zQBt-9+D2a5*sNR!7*byGzI5)^801%U|Ht7X$(*$kqrO4;vrP070GN5Gh~PH* zutVi5l-tBv`iH7#Bn6fBh*51`byb2sRe}MWKZ2F0bijmHt; z({jAY8n}$ZD=yAB6Tmn;g-NgsMC9l1i?P0NY0p5G988+;y`olktN%p|Nb6NwnQ`|+ z^Qs4Y6xXZB-W%l$gF^(=B)~c;Z4yiiFZaadesV;7)RF)?8{)$lbK{0JuU)udhb&G@;-D&a|0j^>fytLHw;*;(R zUQ40Z#A|@CD~8JFyf#;^^pAdn}2%_eQLH zmEf-jRn;#@TzCZlNF^^V{l?_D+7nt@qp+0XVB$c^DF#9iA^k5v#Dvk-5gWc9KkZcd zXKv8zEDOKh6GMC3Ugx^RHtRJD2uPXPu=$?h`3FwxSnLp7PWd;s&e&N%Xj_G zNF^GNzx55UNg#ZV>29A)y1{&7p?cU2FN$FkSgT=kUr;=vtM}ggn}_ z_>prv%;2l{q?gAS+R;tbt5e@Zz@Pj_YwLZImzquEph15G*#?YnO8oe!NGRO)n;Ymy z2@3~dyp#J)sqm*xh~v+|yzWF51QkSlUGw3e!rXZCJkPDV0y-&VW+?6IkTl=C#Qt#> zzWTF%v4Edt<_(z|i@oLQaVfM*MZBY|3m5|pak^)*s2ZEFr9$rt-fe&scTq!v!9Nfy zEZv)-I|JE*vr1ljqSh9N z6hAM5nSOtUsuMP{msGpjI68RJtj_ye4i;3kshM4|?>8cjCb}BAB*p>d?a$vrL2~B8 z_3&J!fvg*3-u%N3&k&s?F1h_TCT|p$4!A_UD=!+?LaJ?rIVy7|*nB8V@mh|D>r^Quwy(>Jd#{||Gj%>U3d|9@^y{U5Vflkef-KkBOfv|g^7Jr9wZ 
zFrD>(HWrx9^o~}Sn8PMKgut&aU!bs({Wwg!XYb!hduIU;YBDwKwVYWRRKJA^FK+#m zLxlCi#t)<|8-cHZ@N@-KLThN~x0cS4J8MPmgXN%wE{KIh_fLvRLRV9FT959l{In(0 z5yuJyqpebSE)O~MP5*2KR6LH(x-V4f_9u?D{Pa;91!!fUN|D*l&9uWln3MfD+H`RB zkc%SGLPk|KqduSo_157b4V-;wW|v$TF&^UCVS;e+q z4)XW-*3g5`cn>&i?8P9hG?zpdas{=jO6G)!BC(Io0qwnVkzC zFoz$;+Lc{SoH*42=vKi|dV8gOZL^5?)Jm`(=)7KTovO5K%~KuAq^5-_FMURZ2+&`B z1@>br)H}H=ay)Gd4zi9i_y}Q$^Ml>mM|B}*m_9_Z0GESY8L98lPd*}B>yflc@4Z97 z6bBCzTi?>Z3y4pY_887R4Dgwa4fqzn8MSyr#7~`;!+ffP*ebuWG^5@v!xv#O=l{P&lI4;BtCX3I6~zxCH%k{46q(L%RNn?C>nrkNINEK{9XsinJEko_(lZ<&Mj6_>$zkTRkR{ zFwnhn11Eeu3 z#wA<8`6zZIb*KKhcvqXVc*zU5Q~jpOGcFDfFmGB#vY9huZoh2siomDaPU7spBn|f>d8V7*0A}j5S z%#_*-zey&SXs=eYB{hv2WMd%`uX|nn+Sn+mx`=}RnN&pm7Q^voYvJ50v!(Zz*9T_}SAg<;wLPq437ye~r>P@0nCvm#scRSkBbIfVi>>Phi<*T)(m~x4xlEP~Jz&OAC@UQspP)HFTuzpu&0$14n#+S zlq2xTgda+`q+s$~_YwDpHHMlG@Acv!K$hjvC?X88IxQv;I4y^4UeDNc(9rPWRs*Ec zgs9p)IScdivrCoyCH$izE24dr3jK7j7#ym0jC~`%^(XP0$9RZi+!(yuOk?*Kp3kU9 zN?V`x_s=VP6Dfb@eqJF{+*0PQi)jvu zg+#}VQft6$g9h8tBYV@K-{+*H;na*|p(Q4!1?W(8C06=yYr-&(wrF^&ACssX8B-fJ zRU#66a1t-KAZ@B3uqy1lj#x2fF`z=ZPg^)I=uzEsIxR>7mS&Eb6-Sjsx?#m4;zLrF z8S+wGKRZd#d>f-{8J=X*SBbJ86tJ(qoPQFvnz}E;*f;S)csdhz`jndJhU;D>c^PfH z@Nw`7F7Q1xxiDdaQnxs*cDMAho=X*ii!Oke>yQlbGvYy6H>xQTL=G!IQot$fk7_b^{h?lOC`yd6VGv4qkXIALkehF_)6#aX=;5SGv zJ&@ga4<7z%LL{wpD8&6mfoQ35YgX?$FTKH1R%$?;M=pj}GUc$R=+Hh!3-|r5vC|!{iPM13K4Ncw?n$ zkuTg{CWEJ2hI^Fvg!@Z?eaec<*^|%SD4CXD1k4DrnCHHqo1)YrU{6l9;l;~1T%3A) zPtb^!95-czLMv7;;M8#P!i4Sk56wO9LhqW{yZ zu#D~j3>?R$_W#ys_Fz)O9lB#eA|2Hobce1aJ}-+Iwa`DS^(9lezBC}7q#LYDZT{@b ze>=E)?o>KS#*UVgBKiH8<3)2yB#IUPV_WqHvtJ)Vyfva(+mS7<_6g6Ki2qJgE32g^ zN6iF={PgfjWk*v>nXEv!$&7N5tO^k9!q+y?%)Ytr#itjQx8>5yh4VMfy0N&{RY}<~ z8TPzMG(3kg|3>39t?t+wQOspS;t$!k>y_4}??1IZS>T<-q>BSuW>Bt0oT2S8i+Ox& za`sOP94kZE-gfl)TM~}x!Cn~A(QrcV{t1L9BKDEvcgNnUW>Ds zjinkv)`lk~kZEPu%NBDE&yc`J8`II?{r;|zvJnW>}a%SIek4<{Fp(}xX;O-SP2 zw6xwAzh|^#;G)BKCeJ8yQcoOp0ujm@n0V@zy|*YR)U=pMVXup+E?up9qdLds^j@ub zguN3Z8$cV4bK@xyrajsI4- 
zE~$Wg!Zhn#B9;4wxA$#(UL(m^ZaVcNg$^WnBF=hUTKZU5HvK_}-thJG{6o~YGqQt= zC)tS6PH+X!w&a37QRu|#I0d=IRF2nJtJr;Qo#Q@oT)hYKs)Ynt(p5>;>wEv21YV5* z8xr?gec9`MwTY-7b^l_^-wlQrbkZZcj@bL z4J#*3!bFy`+-d(ff=y0WqUy*}w$(BjEXJ~~?mX6UM*FQk@efdj_JvXlh730uk{?JU zd+a!<+!Dk0Y=rT*ruGdN{C=`N`>}yjQ z=ZKu{pxVW>o@rfHxO(c7;hM%;62lOpo{IpkmYpvf6>?pXoF$dW?r!N@zgryH(F)<& zOYiKA7HEt@WP2qi7-xQ5y$qiL!q8_l!joe0gTHbu7)+D6V9HGskO}Ce`Zk-?1_pR37Q*Cg0rQxf?yd>ufjI^9@q#<$;DHllBO5``h5p|!i?+lHc;zEs1% zAK$8dG#^}uqv|4jHrD9~Vq3LU^!yJXP69voa!M+rYok4}F5bpW&CZwoL;d=BS%2n$ z=Y@CXO-Ng8l&5@W!942Eo_Tj`^6}5Y6}6Z?5}?=GS}XA!7yd&d%ClnxhxZ| zUw<5pbrnqZYa@}k#MkB~%c+i!c)Q{s9WvKr-y^7Wcf)29d(?-L1n*V@N{R1`(XKfbS=Knd0iDbg?@nMQ&g)VXJgTH0wE8mtXWPgz(y z277m(zOc|_?f)R0EkMynWY29D4RL98oh*x_Mv4wau=m1}q9wIU`fK)|sK8fuf zsdljZn~av!~ISahJ*HX-9O z9re=COJM&5C59?;R2d5946};#%;uj|Br1s|#sF{@8#v5QwGG+D8yJB98g~s3--v$xB9vYcyM!p*|$w?sv2pWB}eHk_`_=&O!1~7qQG| zB#XhLZShGOR-i!xMu!*iBs#iUQsZ~ycY{I%hW8v88DJedU+W>Y83d^ksLF*)x^?O| zPlepLBy8`l7TzkAc+U+3R!HvPRvvEB~oiAev?+Mz`i^1AcAGPQp4 z|Lruf|1q%nU*8ghPf85dq=uGo+Ju|N4%289TdF4Y_|AK2uEFO6^fDW+@cvbXmZQhqm#+>fl?kCMnfA^%=`#s z3l~*OC#hfBT!;~AwLenafjofKIa|wie6C6JX=KmC{DycS%)Tq7&=H35F#CY z5rF_Af)Em?o8ioy^Yixy z+&kBE-MiQ9v-i$!+zHaTICi5;Y0au;)lj?dthW#!l4QCZM!T4F61y~7>hJzL~vm~01jyGSYxv<*ub)p84P zfRQg#ho71GZbhe&(G``30ahROI}Zj|k*n4^st4-`?(95Ly&ZzYSrjNkoRq4EY2raTZ2m?OD^W4`oy*)@JZ)v%OLfEMYH%5w}8Qr-5Y1}1VJzmM7!h42@I@X1tN zbr>ErBY18H#O-ENXT|fy(th6J~PuS)}xRyBvrG*xq}(w48lU*A8a9*tCa@{ z5n8L&+Uv5zXa0Crd8T{>75Gcqgyd3Hq^U?v0KX0-OyeqJc<~0 zQ#p{F5iggU$4bm%u6~z5IDw@@7|Sm}xDBh3xkzcehywV@#NdZaikN~eX6WW8YwERi z^|Vq|Dx<_~keg=CuwO27Qs28}+$>c|9%4eX#!}=aQ$dGhe$#41?$u!IYUoX&gq!DE zw;Kuv0RRci6I>rYP%MaNU8&6HjTo&TGNFkn<>(?Va=^@OcE!%s`r(tTs{v-hnpwHR z8X?Ys%1GQtNvl{(JZ2s{C@wRr(?LoMDp})O-=d;##rWz3_`Oc95wb?jj>=FHnsu=s zSDrNBS*~FqHV=Q{8N%~gI44!xVEKk>banw-B&~@h(4$6ub4Go}MucwD2SJ5qD>;TSu^NaQM)D!PtvjT2ALDvt9VL2 zubY|$qR7R@FJVD=la}BzK38=@4#M}S|kV{6^r)zSpS#7{Vn^V;SQzbJd6d&$Sf$z>(1Px{ORb;eK##0$- zKdjP%Q1yD90q5jHuyCgv`IR!Wt&k;71>*}U`^ts8?2_OuUK+xJpRWD21)8<{dHo!s 
z<}!ohIGCv^?}YvI0w?py+Y~o!i?u(Z>fQ`qHVAXp)IquZxMz9|q0d;=@+vPzUs{II zOp*;CLQ_^Ywk*vJ8!SXRFB&Gv%iATr7~_12U1=pu0`PIhP6k{!1`b)9=}!fTE`3gW znawMatjauSCQC|)Xs%{iclIR?wj{#qABfc>(2H&NDBy_4>(d6Ck%q4+jHXBB*8(*Q zegT?F>dL*k@q_)s{227O@`b$bXaow!jze51Iw$QVMr;H43t05ZQ;AGczC4Q5e_ejn zN(U4T@ZUw2#`$j+vI!`&L*I!@f9+sPHJn_zvHZy~lCA}<+z!5L~H(|(1j)X zZ~z|jF4+Iun6&E$EGml-=FOU1{GjS_zbF*3T=B)~YPsvZ)JnRK53N7n>sTj^Uh7mo z19BXwzbcmkYMeqzI0MOMmwc84dk|F&T&!{}hp(%YhGJ9i8$(K2OR<_0i)oizlCBHF zDVhDR<*xJ!?Ms1=tO} z8od$9LoQ1->8;)+(<~D7IWDSKj@F-YLKwRd@BHULQuOklZAL@2;xQ@fS0#sRpa1V* zn?Hu)(2h3=0llKuX*vAeYgz!01k4pqik&}?GBTa?>f3MczdY0&u&m^VyS57G>Z;dqWHU=FiB5ZCRWv<~Q?Tc-FzR8)~2fcBPG z(lWKrg6hJTxk{fhqL^p*XnM1A>sUGv3F`s0baQqC$bPW(|^2*Ca%puDA!#$;`UjJ3quA zeD>zZK_~X|a{OR+wl_XVb|?>#K7a=+i9cWzFMz^~5~6qs$viB!ce7LdZPcKjJoWnIz`R_Kgl`!$9bI|0AkR8+A}bv%WUmd6nG~u8R{=Wn-swc$ zRBh4{$yNMOEr61mfSN8f31UP#CF6)oPtoxx?z^=1=}b-p&&3jh9=c-h2q^-IC}6Iv z+MBs`v4)nKoO?{6kul8*?ejjR`x__2OmP;KDAU=cHg1M>^=TxL(cyZM;Nc-I01c9@ z2=pUW@y%z=wD|H~Me^?_Ig{EzaKhmd?^w{PuYe%v)eDK;U@+6sg(Jb~_$@+{vsGCV zBREO3>zfJ)X>99Fw$L%cSd7b7x|kuqbJ)7P!1T;r#LLZg(YsmSH9ZT&0hk}~=>hu3 zvhQ>6@%y$$y{?I}(x(_@v&(y({2D+9CAO1(|3OVxuvUJnn*OSS^)aGJ3k4L&bNA+cDLK`@GFg$bW5VQ=?;Q$vbyRo&*QVRQ3|TtP-2>;#QW<;mE&wiMu~KDoxCKn%!7n*NrUA>&ZLsTLNeou&L6~E7_`S*-!6w z&YHiU2T#(nKjWCuyi8H5bP>!^G{veX?7D+{PEPZOS7rJHPX!elq9=I${|?bh445MU z6L+v}E^B0w9ZtA}XGjNf=6+0n;7Mzaoep}S9nwD;!2&gZA%vaaPrM1EA@6C&`Ia&r zY2TK0s2QW?Qfg_a6vW0aHlWRPxFvj35vezOCp6*Ky#aH)cf=;y?A&`dvdcNej$6X) zySj<>Ste>W{(G+V2{AV3+)ji3X$y^?-P+I7wRh8A3_;l z5G3vAUr`hvW5pNl&X9aCRg5bgAQKU)%sawcBV2CwDRpD&Kl1cOYyItj`}{i4AKa&G zpf<&yeVl4?L~hi}xSn35X6|ONE)mkuDp)QCH0p`^kY->nCenb$GGR0}FAY#Tap;;qaC zQ&+QprB#RZ-(>xrpRsy|zc}%r{H_4x+*7swbn9xo7vF^PM<(iM`LM(Kb-Wu%BMVm2x5AV+-Ebx;BBwYdkzWl5s#9 z2+4TH1-qFw(#GcVeJ6}imW*8sOLcJ9oZ1+yJA?~hLWXd|&q~W-L)*O?E!vI7hU%C+ zu+Ow5QmPfI$Vjnruos8l8zXMoh}&-6lED>7NxmZ4oxPW)5t~^QX?)PuJ~>`wm1Ak) zb{A$8S9rIGgyGI6Ud;p|nQR~tLZ@9M4(t2dT>Z1LWp~ot9X`*2_HxgBVIp5j5LAJn zHW``B<%#BG!DCTpR;(@v9-vEUt=&wdLg+F6o5Jw6*eQ!|0-* 
zC7%`G(2u?2O5EG4bZ4347<6o#R_>E!n(^Xi4m^qkEc2`hBG~ISk5REC5@ormZ*sL1 z)RuS|ao7ELSZm;!4UirmK&tL7?7Cc?cK1AVxwWCFVT4hp+CF>v&SLWBx4UZukWls3 zT$xar-RK+B>EM*lNhWn6e0ES%{)0U5f(|nebkR4WYq?CgL9RaUhy)I!TT4;pn0DeF0w+|g^Gg1#bZEzEkI%~~LNF8PjhUAJYvR(Ng>W>P^KC5e6#+c3dQ@U0<> zC>X~Ls-zh0>Ty_z-8$oSFVfYmB5qNx9OAd#alkWZ+NH%`r<(B8d53{=Zis?M(|8GD zH?w^z`2-4Vql-PN@R1UfRk7_9T9uZzA~3X@@w@6=f)MzJuA2ll`4|>OlmTD|QSq!v z3e39G?PKzg0bZ?v9|W}f!PPjTV1yCMlvTQNotT+TG)>UJ*a?`6hr=$7Oq*D(RU@^; zpUp1$B`Lm09h@qaQz{Gd9^z!{e5=iX|LTqEI2JB?hj6Ee3-Ffh$QAoKV8<^yeZn6R zzAY`WFUnxK7DB3Srl3J(_ z=pI8ay_P#>*bccBF^1#eJvR?(<+zb_$>*kYDOl-!z#mm(tau7TZY5UbY+KIF$8d|_ zTyE&j!!=N1fM&lC-SPb~6Y^iNoE9$NdK&JE3f0oHU`?dunRBa;%cNki&4 z$~QJDM`M7NKW*A;CN@CiZ`WO<}iW9-EyBosRVmD>A-N?Mi#QiY_Z7M=QU+1gUM}_C zIRBG?s6XIpe`)xK0?0pDSFv8&!rt`ve}F1PXEsDRnG+K~dJ`?tv80}{ zGLQ`y?GrCJ&4f&rnEeive~)S%H{n$Z`##3PUx1%3hnHSSb_&<>cUG0fFV96O}hYW}YRA8fkR$Gg2IWWaNxPhd7O#ZrB z%3p3AA4sMA=B}zqpR{`aaqxc&#_}}fxFJ(!h@#Oh74WA4XR1fF8LzNW&Z`lv;h1_N z@lsTwv|~Ktq~S(!$v^V+hrs<$WPW!I3QVc^@rBU zjgT>SBx6Q9SM5cyw-^=tcCLSuE?2>sOmW$$j zIxae92(2};Q`1h4sI4csR?AR~SjGg|`?BM(%iP4|4_3uEPAX9lxNA8|k`$p8Ivm)B zmvul6bbfl{%hR7D8JE|}fQNGEu=z7mEd7;yE;nrcrM$9dKtd|8^5c2Bhd>nT`QtV6 zYdQJ2$w+Nj1{S={k~aaMf?}TdFYcOSv87J#l+VHATE{tFHnjD9svdQc`Q6ttA~*hd zEx04}asCSRq(7wzVoJr?WLgpZV(N+GyOinx2&vZ(w9R!_TeeAYKbky4;Vy%j?18rP zOGSlW02=S{vApf;PLyi}BZHeW>n{kPZ*-^Z5_R`M)!)!OcJY<1f&Skq;|fpuTo6RL`>eJHC3Ka&83QxDychZSj)_IbT1BDw-YCa)6>|a&Z$n+q#ACo4?n$D zwYkdDAFsU+tq{qX^!}l8F}}=^RqugwVZ3NHSEWHd#pRxL>{<+`U3b14)0SK5)SD%W z_>N6{Up`q?#|(+N-Gt(1%cdfAXiPm&(XW*nX(qF+Cr4w`$L(*U&;6rtBZb#{f+h>= zdRf&L$JNV%X8n;x`qhdoL+=Y}^wa$5v&uD_YrP;5=r`igXr~wL{j;iJQ^yc!cA=il ze_EiU(0BB=X!Yuw+nF4tEVs0Ep4q*U1kZIrC<$KrYm;Bga)Rh~v$)3L;2__Eb?rPn zC?d>H92O}dr_c=tBQ;BJl!SqH=Qz}%So)N|-n){u%NW%1j+fm)1Olq#kT(fD%M~Pi zpiJr4I-Hs1Tll`aXusnI338(VzT3=R)P|3|IZ3t^kC%E$pp_c=rP8T9Bud|Of|Xa* zDTGV_g}aLgCi8pvj7ih@+K9||d)9Y!0UbCC?P~3KlaTN;BuvUu&-HUb7LN&fEbOIZl;Ln9+>aC6<4ElJX|ec+F3Y z-glCS!vRZI=p=}@6svi?Be#%AM9mIeFm5EYm~~lMXxoxJ*8eTO|EA{rqEu(0U1nZ} 
zvC?KU`-+zl+SfYeH|heH3&vl7OpBKj{^#QtLk8#9Wq$mxC(>3%wjOuODko?WC~^gC zHhb_kTzhG9`UB!AREs?8o-KXD_*tbNyFpCWNV^5YfmaQ7PR2xr)YGvm#R#!&_9q7% z*IJS9NgE`!^E6C#lbQG`2yygzwiy2Qy9*qVQ%Q=i!5?d&%f%^gY+~Ou&qV$}JmFgK zd=ta5gH)Nd&6f10AD)rkXZpzGK)}3~ACpm8?c&S$KrNZ2^GXdj=Smc>wISJBPg1NU zCLb|~p84+)WNn*sg(u+`)}oCo&`m8jEX4JxMkQu2!I^bdQF)o=b*4Gc2neX&by|44v*na_6{*6}v diff --git a/doc/tutorials/features2d/table_of_content_features2d.markdown b/doc/tutorials/features2d/table_of_content_features2d.markdown index a5d5a91676..37e4e41a1d 100644 --- a/doc/tutorials/features2d/table_of_content_features2d.markdown +++ b/doc/tutorials/features2d/table_of_content_features2d.markdown @@ -6,39 +6,51 @@ OpenCV. - @subpage tutorial_harris_detector + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán - Why is it a good idea to track corners? We learn to use the Harris method to detect - corners + Why is it a good idea to track corners? We learn how to use the Harris method to detect + corners. - @subpage tutorial_good_features_to_track + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán - Where we use an improved method to detect corners more accuratelyI + Where we use an improved method to detect corners more accurately. - @subpage tutorial_generic_corner_detector + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán Here you will learn how to use OpenCV functions to make your personalized corner detector! -- @subpage tutorial_corner_subpixeles + *Languages:* C++, Java, Python + +- @subpage tutorial_corner_subpixels + + *Languages:* C++, Java, Python *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán - Is pixel resolution enough? Here we learn a simple method to improve our accuracy. + Is pixel resolution enough? Here we learn a simple method to improve our corner location accuracy. 
- @subpage tutorial_feature_detection + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán @@ -47,6 +59,8 @@ OpenCV. - @subpage tutorial_feature_description + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán @@ -55,6 +69,8 @@ OpenCV. - @subpage tutorial_feature_flann_matcher + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán @@ -63,6 +79,8 @@ OpenCV. - @subpage tutorial_feature_homography + *Languages:* C++, Java, Python + *Compatibility:* \> OpenCV 2.0 *Author:* Ana Huamán diff --git a/doc/tutorials/features2d/trackingmotion/corner_subpixeles/corner_subpixeles.markdown b/doc/tutorials/features2d/trackingmotion/corner_subpixeles/corner_subpixeles.markdown deleted file mode 100644 index 946fd77b29..0000000000 --- a/doc/tutorials/features2d/trackingmotion/corner_subpixeles/corner_subpixeles.markdown +++ /dev/null @@ -1,32 +0,0 @@ -Detecting corners location in subpixeles {#tutorial_corner_subpixeles} -======================================== - -Goal ----- - -In this tutorial you will learn how to: - -- Use the OpenCV function @ref cv::cornerSubPix to find more exact corner positions (more exact - than integer pixels). - -Theory ------- - -Code ----- - -This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp) -@include samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp - -Explanation ------------ - -Result ------- - -![](images/Corner_Subpixeles_Original_Image.jpg) - -Here is the result: - -![](images/Corner_Subpixeles_Result.jpg) diff --git a/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown b/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown new file mode 100644 index 0000000000..82b33dd256 --- /dev/null +++ b/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown @@ -0,0 +1,46 @@ +Detecting corners location in subpixels {#tutorial_corner_subpixels} +======================================= + +Goal +---- + +In this tutorial you will learn how to: + +- Use the OpenCV function @ref cv::cornerSubPix to find more exact corner positions (more exact + than integer pixels). + +Theory +------ + +Code +---- + +@add_toggle_cpp +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp) +@include samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java) +@include samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py) +@include samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py +@end_toggle + +Explanation +----------- + +Result +------ + +![](images/Corner_Subpixels_Original_Image.jpg) + +Here is the result: + +![](images/Corner_Subpixels_Result.jpg) diff --git a/doc/tutorials/features2d/trackingmotion/corner_subpixeles/images/Corner_Subpixeles_Original_Image.jpg b/doc/tutorials/features2d/trackingmotion/corner_subpixels/images/Corner_Subpixels_Original_Image.jpg similarity index 100% rename from doc/tutorials/features2d/trackingmotion/corner_subpixeles/images/Corner_Subpixeles_Original_Image.jpg rename to doc/tutorials/features2d/trackingmotion/corner_subpixels/images/Corner_Subpixels_Original_Image.jpg diff --git a/doc/tutorials/features2d/trackingmotion/corner_subpixeles/images/Corner_Subpixeles_Result.jpg b/doc/tutorials/features2d/trackingmotion/corner_subpixels/images/Corner_Subpixels_Result.jpg similarity index 100% rename from doc/tutorials/features2d/trackingmotion/corner_subpixeles/images/Corner_Subpixeles_Result.jpg rename to doc/tutorials/features2d/trackingmotion/corner_subpixels/images/Corner_Subpixels_Result.jpg diff --git a/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown b/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown index 7aba636746..f10d3efe4e 100644 --- a/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown +++ b/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown @@ -1,5 +1,5 @@ -Creating yor own corner detector {#tutorial_generic_corner_detector} -================================ +Creating your own corner detector {#tutorial_generic_corner_detector} 
+================================= Goal ---- @@ -10,7 +10,7 @@ In this tutorial you will learn how to: to determine if a pixel is a corner. - Use the OpenCV function @ref cv::cornerMinEigenVal to find the minimum eigenvalues for corner detection. -- To implement our own version of the Harris detector as well as the Shi-Tomasi detector, by using +- Implement our own version of the Harris detector as well as the Shi-Tomasi detector, by using the two functions above. Theory @@ -19,10 +19,26 @@ Theory Code ---- +@add_toggle_cpp This tutorial code's is shown lines below. You can also download it from [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp) -@include cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp +@include samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java) + +@include samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py) + +@include samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py +@end_toggle Explanation ----------- diff --git a/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown b/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown index 7c48aa189a..70d25ab9e2 100644 --- a/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown +++ b/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown @@ -6,7 +6,7 @@ Goal In this tutorial you will learn how to: -- Use the function @ref cv::goodFeaturesToTrack to detect corners using the Shi-Tomasi method. +- Use the function @ref cv::goodFeaturesToTrack to detect corners using the Shi-Tomasi method (@cite Shi94). Theory ------ @@ -14,9 +14,23 @@ Theory Code ---- +@add_toggle_cpp This tutorial code's is shown lines below. You can also download it from [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java) +@include samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. 
You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py) +@include samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py +@end_toggle Explanation ----------- @@ -24,4 +38,4 @@ Explanation Result ------ -![](images/Feature_Detection_Result_a.jpg) +![](images/good_features_to_track_Shi_Tomasi.jpg) diff --git a/doc/tutorials/features2d/trackingmotion/good_features_to_track/images/Feature_Detection_Result_a.jpg b/doc/tutorials/features2d/trackingmotion/good_features_to_track/images/Feature_Detection_Result_a.jpg deleted file mode 100644 index cca9a2b438298d1998e5d7abf944745edce5c4c3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 35105 zcmb5VWmFu|(k?u>1-IZf$P5--gF|o`1{+)x+zB4s9R_zF9D)aTC&7azXmEFu+??~? z_1*j9`*U}%)xEm*eyX18+Piyob@ktszdry3$_h#f03;+Nfa1Rm@OKR$3qV6bMMXtH z`?o_wLqo@Sg^BU6;p5<7y&}XXA|k{mBqS!ICMPDLBqbz#&G4F%hK7!gj+mSg$VdyM zrlq6(j}oMRPz-boJWNbHS`tDM+W+VFw+}#w33x_QKtW;zAQK{?5F-5@0#E}0$jB)F z*#3VF4ILE&@K1#Kuh)hEfP{jKih_!Qg@ulXkA(z4LPkMFBLtumF%a`eV~{Xvk(#?= zk}>f@f|E<;WS}~!MUDO6=7G$7p9cn61%zZR+&nzP;l)iqFUeW>1?9D^tldLGQ_{9C zUdt)yS$c()Ht+niLx}|V$Kn5G0r+PI843Mgp8(-M4k*Yd=;+8O{}~1N4+la30}rY+ zk(N0cv1@Sh1qmZ>QRBD%OD370K!_V(%HLJMD-@)EUj7>>2_Rmbwe|Gh%VOaiZz5E5 zykZz&l_3a&qaqYhL-wLO>wJ;~8!-9oHTaKtiknUl&Xo-5S{+(qukb578#R-Vc8_{R z23f7Yt%$Vpe+Li|G<<(c-u9~pxFEK{U%_amrdMLQEr4lb88u92YLizLgHbj}!t--+ z!~2Nat-D9=B~P6jV>XFj&gGX+wml=R%8Z}`M1oszTN1%Ds6eOXjXI2GCZ zC3oaI8IR^eb%wXRj&p9mvU3$C8ERlXT!}LL<*G+0MJwmkJx|SAx~*Ka%WGLXR9jC?TIeo zj-Dh>SBr(++OK#wAN9PPCR$6p`~{fCT;ca}Z|zqPLKvi(CyB>Q^WXlDI~F!}XpD)e z*Ot)T4jTCw?Iv?9WfwUgCw4=*tzpL=fw3Nu_g!sih$)*v^}-ACj4~-U7W+veg4>oN zsmc}K;lz3XDD(K~cMSbvAQ%`MzI8BKUR=nf_vq?q3wb`XU>LGtyBw zD(a?3v@o<0d6e(^({!wFkxDpLj1?a|LaXfbciyFkjmqhf9Lc;egOrTNh3F0Rwj3{j zAyRRFwqq}5yVTs}HS{}auxU&D9S=$UKi~Z9ZEM%LVdvDb`(}m46-yJ%Eu0g0NV(8V 
zchZq#QNLY3i)=QHE~kxOs&h3vANOCEUAl)&^Z8c7q3ie0?E8Pp=(YF<|0~&f%UGWtn9KdoYTQ3%sMsu-0sob} z|1ab4Gw{o0uFm6+V7dAKUmpnA{-0g2|0OZ-{2?`cnGv34S_wrk1X&aTM2~UQ)51z? zEv65j^G>8%>YOu6n>rV&LW2s9-(APqkX2x-VxNLgO+pu$ht$*|I-sD!tA4b87fBbG+QELT8Caz!5 zPKx$kB#bj-{{n^=worb7*Kn~=^@yaCbq2)DoZCO@s0y~k3{ZieJPohRh(1Rc!eSP7 zjv(<9(bB-j7Y7r^=ln0UE@9a;a`@+=Z7&b$iS8dQ!#*j24PUBaD{gioZcbZnFe~0x z`COpHu!t;5!u?Fu^>O5B3y<_&VHq^L434BeYx?xhm_3Z|XO<6a(jimPr|Dlk8H&nU zQ|jTRY;c;p4#DBIzktv0EenZ%!4m!gu47AXcI2LO{1`ewMGG1k=doM%uYOXLs`(cq zlB0nWRw+Ii=noeYv18-!|2&K?*7;>WGwy#_qz}+x?OEd2>*#HgD%5^gdDwbi+2#C4 zuZ?z?*c42mdDoTs>~P)K=}*@?y^hf)-fmDC^z^v$%hQFtkEt+EF6(FB3Let(shDamK9OOv1rwPucJv>s zyST$qhrw@;Xlget*CzEx7u5Z7pIOxhlC>m%k_Rq{(7DfAlQonzf4^Zht<{sR2K<|Y=! z=f|Usa?4Y{kMOs6{sPV`Xz#~#+k6w9pQIB@3+~14%*~5Aje+rh0sclq@49DcvTy8~ z@fp#ibze$QwFIu)GcajNN9pcDY42A)1vEbNCl->Qd7m6->UiLvT|YGrJ?Hz0w<8^0 zeR|otF^gfTGnBmu z-uS26^)KN67+KfrIA;%c%w%U;rFs@#G&!hgF;g(Fwqwmt|1*MFY*%*P@aNg`GL&M= zJm8-0GL$Se|DaRgMWnb?XxFm!lhn}rfZ4Pwaf(hijjV{!Cxw5I*7Js*=fsEpxc_~O zMG8Df$7B{6{|m^^FKCG;J@X^`3kXzh)7boWcOg%#X#9Ujk^h(7o2y>;?eLoe$>)Ww zh5dg4Jzf@^IrYDp{R@~!)#A_i-&`oRivG(94}32H^{j z<-Yd(1z?`X&q+WBWAj} zTtc}8lmCELw*HHGkPIC84@OjEW#30xS`OwmddE9@h(g%Ak}65g&jP1;7{Ls7vUMk0TM%?>c=PW5q+`N zPLBE9J^2fx#c03UP??1LG(i7Jx)%QFxBKYK=H71TYJ?Av4m;%IZP(|-;tnFmRA-ASX;GVIOC5NV@HrMFso_-F#W@R1}v_;rSU%x=oy zWqR0nTvO|9Y>^%+fnzVzEEY}mqP>VV(}FwDpmwyfPR@fpY)&_=M2k{NUr6vc;-VzP z=5~5N2aa4`U7(@-xWjYDGur{JaWwJ0JH{+SbIKF|v{nZ=gj#$s)-*KM2C$Ee ztD6t7R+kJGh$}-P`cRf9mWMv6d*uzmaJ4xRUOllwHL+<|y_rn3%PrL3e&cE~6VXk9Jyub@Lpggx= zy%d5oD=LPmDZH5qTS<@bJtX0-0f5^tB;Lr)qjx>1t#3>jj`WbiB)}?rpptxB%GUa& zJtz6Vd^Vk6sk_biECoMdgwc0ZL5jM%(UpI(ua+*JH@(_N-o@=NpgscXIzx=a)!pyWpRFku7=bVw^c;F?4+|z6i#Njd~aEhPjZl z@?w~lbF)?VY(Z;$%G+X6J5o9@`Q}wL_62;_6{HAADOT|VAhCX5rs+woK(r{5dF$(H;V!0GXcFF{@>UhbaK;MP~(@-df7_w%}kR&Mp)62sE=FbtOMT{mjT zZhS)kr$3UO`*_g$!`ymiIUhBA&#NEf(-v9)UH?yxuE?;83+Ze0+e1QKs)VIr!3yn< zf`78O?}WE^vj@!(9+e*hCj(FIf)%1&jPC2uJr7a*e_eIQs^f{+X=ToCFS 
zLhLj;kxOoOI1gG{tMzTzajd330eRpg1AR~vM84-*ozBny1lbPK3JP$gf6r=L?~QjW z0E4=}W%^eY%QKTXqhwc7N zSK9mu==oin*tWROQ8L34ZP>|sD%&=NHiE)tjv8carK527k=do!etboDKSi{lY>LW; z$|-3{Wo%`(EI(pH;J8r@*F)IQkBp1^3_rp^V>KOv(w&spzS3Xc%6?DO4MRl(ZdL_E ztT_+_I*z`K#nfi1J$p>ac}Ap8{)wfCKRoPp@;o;}Etpv1qo^;)Pv4qX@7ZhKYR>x>MvmKOJWg(LuiWxzr(dD_xCDa==sw8AH$U=Bv9MaCCmkc!Ot!bRP%++zwY&u zk>YRRH`PayYt%SX+c(w#k)FP%U9+n2@#CJi_3R6k!Lt-~2Z7!T#=>hWE?OqB&Zi?Z zqv>w|(in%MVGce&io3RodIXhIv|S1deWRivfM*n{BV#6JPX(=07YtYzdqh}86}4qWXm z4V^yrVYc`n?X_vewePW^DT4w0QXAlZGoX(*(!E%RTZw@J1 zMb+DYgQ-%ewG%FtEJPR3Uhei9p5FA+am=wLQP(Wd@&$9YQexvCfzhN+xXrj5x?k=_ z!EnaRutsp^PpGY9A-^mF$5B@EXC;-LPXA_KzV4_xGqy)rvIOw~vG<<9xl8%C&9tNx z3$J`_B^{@)dAX60DUh_86BHSWc0a#B=lm^VX2#wmr4U+$Qx2}z4MENcY9hOiOF`>v zqP&;Y@cXl~m^7;Lk_bE)FXnZ&CV@y<1vCF9TVSjnh-zQvSQiCf7# z?)mZ@l^DqntAr0VU3)DBy?cXw@=hj2)DhjXW*UU)(_B=}fv=;V_uHh6(g2>M~Vd?Fm!kx)W5}!AwsZ=3Hn;dV7%X3_(l{!$5 zP~i8H>Zm$y%`-=>-?_>}mA|G{rX6-qI4G>0n>jWLI}}eRWGo%WqS3?cwGqm?971Y$ z;t8Wfp=l)Q=G@mJaNUs&A{!@F*+;e+vxNlYLTP4M@*) zmDUo@5A>)=p)_#F+s-yqgM)OGw5rNgeRGswjk@tgK{ZP}E|o6A3X#!s)`RVFc1-;Y zdZ1ADIkoWwOx>vJrd*VOiMF;%noBCsaux0v)F7^?BjNDF2ICAHOTSX<&|0?FHLd>p zgqsxy9*y#zErqmgD2d@(cIPYULhB9oQda60v_|LVg_16nE)X5}%hbav5o=5aYZ#52vu*o7VGcsI^-HvEJZa!EQ?-`IPG*`j0r6BM^ zAv1iQ%B!oDS3-)b`r@2%PfuAF7cC!z5U+?@EX0}mPM@;vO8N*6T`c9w-%xTQW}eD~ z`H4Nz%A_uC0O}2Ox;poRUfM)zXk5ZqraKbP)535&wc=Q0?FmwdoByvm?^&fSVmY{(UuLClD2Z*fa31v zKf~_{OY2;r3QhD%fWqx!C#aeB?hkeZO6Fzpn%C5(k~LAmvL8^tzjeY5@2b5vOReS6 zjmDM*ich+s1T)izd+TB{n3UKDI(f4U3tF($m8euU;#p3kvd4XOAxV%IYDwqvJ>w~D zcaV;xt59r_sP%|}WNNA+s)y#-9GsjLDxA});QoPti^;~V_R&AQ z3gX$j%k0F+M+zbdtNiv-jcWB{S8C^h6|-ij`n(z7^4)Nzhzii9lH%T%6rN)K8NsiR z+0U-?)Kz*$+A-U2F$#kfNz%63M1TDSezLZ~PPA$;oXD`od z^z5S=3f;wgp*gZ8XBt=k(`7Q~;-3Jba~FfT<9~3%d8n3`ly>=(Yl7K&->VAC#m?Q2 z+-IoDOnaP&lgL=>mjSRmkY+~T_@M+G$kT2`Dxl340J}8g!RfIUHku!7p5wWWypgQS}x`Ri5L+J?CTA79O2Pg?KY)h-|s zE9rte6JgbSPDP)a@>~T8rixlcPAto~;$6Y=JT(J!?j>k4IX|H#wq*!;lJ?v3V$`VI 
z(^`r#h!CVV2le7kdq~qz!xX%$HPPP#V-vAQ7_Kzk=BwZosX0~peec!1gn?svs~cswkn^D)}o?M`{@>*?RxS&Ou~#{uTYeDl|6 zhRh3g{Gyg_)qE*w+DU(cH(6+mrcv^OM^!fB6B*a~Oq9}03%xvVx&_j3iO6wSswdGbEntC+9lIE5%V zTKU+?)Ts4~%F|>c8oiAs zhPyHIIPVf1P07g7>0S3onZBK)qF6`)m)`5}h(XX#8ei|I)iPPzcS~k^UrAQO>_Uwc zC&V0@*$fmV{$($CeyIbg&NyT0idZg)q)z|@o1 zE5c-}&_X25y@PloW$5bdS_DR1HN4wS{M4Bk)`#m7A7a_$``&q-DR1E)Prer6Jep zZTXqww~f2n36ut{MN>jIQ#=ltTpdUtK$W<$Kxuk<*1r7KU%Bd~ zbCZp$u4zZxm8i^|G73ft`r$;QvAM?mMMOt=-EV!UZ5#vG zsuh~W+@PpOl2c08iAvjb-W2K{Ld@(|*UqQj>_P%5J$KRcg*~*aO*-^yT7Z+?mIwiTigSv!v@*iNl* z;}*P1MfO)~J61N%T`p9c3ewTnPA+D^n%hPW3GRW{&kRwWyg4^4EH*8*lqys3=}cYM zzc%#b{ODH-EV5WDbO)Fg$o7D7K5ID1E439MgA}zL^~sUrgMgvtZ5f|2K2YrOhmU@} zDi-1l7oXH$4-)KVjBE+wVB%@b$F=V+ zCbVrP=KxAAXSnA9Pp3iJzaVP~W|a-}U7i>um@q76nEZ6dZ7)~m{)`XtUVCSd9c8wZ zD77-QIYAGV%5XX0JR-M=P^(f*WCO<3I`2^SZYLy(me9|!hh;%9BwBZI$9Bhv z=EMz19cwyQ{c053UEqO_Q6>RqA9J0E7S2rRSYcM3^08N3!Kt|j;z?5Fww6?MF(ZBu z4ia0zaSrG>kL7NK#Z-8&A)8eU6b}(P+1kRb%xK&ejIdH1E%iyBYqYb@?kk^9f~eW8N{{*R-c zVN(Xl@f`0oyyMDP^)DXI@S{?E2#@w0SQK<1z;7&ZCUGwxyXXl#ayB=0Um{7ms~wf` zc(<^Wo*nSDd_=lG^bF`LljZycjP*FNqHo!CyWI-hLU(!w$gx_bv=gXG*+Q*tF#_(; z@Ah5UnHlcfY82h=Xj3vMmRAE6%Ge>HRTmpl#HHR&@6=kikiNW`Ar*b`j9#exsu^22 zWf2z*=$iGkWE9Qj4gyx)Fjn?(ef$)$y93a%=g>ZT|jr+rwyhT4(k$ z!kQOEzLH}jDNBP_y9^(0oo5}&eVAR%L5(Ef$Dwiqnye&ezYeQXjY`Qeud+N1i=Oak zc9I|Giob7EQfw1*jLL?7ZrEHq4}Up)SD+V66Q!L>_%MglCz?{ej)pC*^e@8zeo4$Q zvJp-<0CSe2ReQf=Zid?uBiw^q!m9nE6Z}tSUbz)fc*QG%eeRgCV?~MboJ4o-U77W? 
zv?Hwz73l$Z$)`ihAL}Q@KDF|5{Fj(xh_gTbIi zcOVcqs+yZ~_n1k%r~03>q0ZFnMb*Ug8fSM*?Ud`CQH#!}r`xUD?O%XICs2XguP;8| z1AU@*e*-=p-&*`ae%_H5CgqvNouV3w6F*R(gB2`>a~whtTn*)iP5N<_r$;I?4B;Sl zC14P_r-2@d`8W_6@4=})hmW}aY+vxY>HLYucp2H6a(<5fN%Bkkc3dbknz}!OvXW8< zlJ4r#o|FG)v3JH7dmz-UjEd5nm1SlnI<1)I;!c{F`Ha?PW<%c*{({#k36@D^XeTm0 z1vbQ6^>0-xJ|l&gdUDvi6n;YesuL0u-MUKvKt1($E7dj5UZ`}&uu!N{@MW@hv~wLX zf+(VHoprouTe`-2$5uAXc95J$LWT%1#qa+mQeBQ0;Og0vo@0xYxu|Jvr2pRSQcKE5 zhh)_1EoWcxAl5}y?X=80f^XAi7K+hBNGiyrghb&#+qlUM4M~nz^?oy@Rn8rrbZyW$ zPFfSJOK&rSID?X;*q)0E%iPIYx}*oZtTRMIO%c|utX~W=!}`jbR~=gC1R?zrG)j=A z5(_V7^@kGek_V57{(GU7PczM38A%T4TLp%(sKtTcT37II!_-@gd(dIL? z#J_Z%y8yD?hwmwgpzm0E@#2h;3rhQj#_N>C>HX9p*Bl<; zp#&*g682Zz(Kvvw+gcj4G=a0hxo~+hsH5%PezodX$ERjW8#~g6EGXC#35?q?iO+zA zJkEy8mgYE?5FpdAEK})snO}vvZ%+|e1+UUh4zp(7O6*WxEFWjKN|IV%XBi}{iaYSn z-mb1JCoj~_QJEc+>RjwtXlk6srarGWli<1!sIn1-yMyNr=k;&Y)Z4#tMdX6r__dtC zEe`T$X`wAKXWZov?$bp<^ka$Mb<`h@)--6M?nBRKGOFAVo?w#fO7HQa!D#04pQCC? z57ICr0}SgIL)NnJk)wV^cvU<&Mz?uXmkHo)^UzH$_Is;Cu?iVOxW!uE)m~~hljw7CZIz$K7tk`MwY5Et zOpG`aq_#}j%;@yG^ylvwk!jM5B3+{Ol^ex4!5h!m=9>e;Ay|4rOf4H{+PXeDRvQP) zsOy3nA)8~PVfGbDrOM!s6$egf+d1c-qP2}QYIj@j?3A6G`c*XuJM)}so|s=is72kN z`AT<66ikWgL=)=bwnQvdR$^2SbbRpxk$S{(Q=ay8)9hl@%Y)1Lzq?zYxf6~j;+CUc zt62QPd}YG+5L4Ppn1~~_16x|Y9}sJ@2Xqo*vF@~r`>p}a)M6c1NmQ&-PR02F4GBpJ zL-fB_s72*kKXcx0C4R33IvMikeepgYdU!Xkf!my*tn;Yvhra18mQsn^bZfraX-VNq zlwMRcCqwOOKKy~z^0(Cn;YRp&_oR$DLI_nKm(+_nuE z0B64-MhCv14qmZom7Yei3=`IjtlkxG5N}j%1~?4SV8o@ciI!T+ud6>S5-Jd!@&cqh zUQ2~=5qE@?W3>incYAZwTk};ftD^Y>!OXLTxtEVSqqd3(O=%dE?i#=x<{^*nM|088 z3`PYarU@@q8(YtJKgtpq9UdI5=+oAc)kBXDdiB3>Ym#hHg<$=`+e-36=Mdc zJqe1gl6mPVk{$H;LkZtQh0-Faf5KUQ`bq)U#tbu>&Ehq9WQmx{Q4K(eRDDfk#D)6$ z%)oeMluyecJ;_nqSqaDXw)>b!!}O6>m3Zc?E>0Ro?Bi}oY}rHihdoIO2wPWa=@#(v zdLrIXd{>XZ_W_B23%ft@wRS5kL;ZYKDUpZQ*nMU*^h zA?8R{=b~{Zm1WYP>Tu+|+QzktdRoEaA8?kPt+o6cdw3ZaYq${A2+P2qw(-s6+^XqV zW^GH+QjU1C1`z$J1>TTv0;6K)%TbbB^CB|p$SlFZfAwb)W*iuajT6<2cBGZi5%J#%M&Y*Fh{36= zRGK4)vbO#&Zj+_Wa3eO~n5drLJY}K@JN$;HK 
z<~N|i-_ly2C!=L@G??702Q4Qp1G>GfV9znnEov(hDTdg1LRMvZyQS+_cf?V75UHqa z*$*laSXI^@-et?L_{Xi)nAv4aoUM(F^lzyMgSiKk*WJT1vRv09wOt_oeaF3RWazdj zWG9F>-??y7;DTugX$Al(ZtEp-U2g`G?wm>L;D-#Fi^I7bc;$+|Vb<_dfaR*~8Jzfh zxlZ1tWB`+8frydPp|yenPk3gDMy3PhsJmfUBWNeCJ!LKv-xe1!6pWC~qK9p~;iaK$ zy0g`JYwMNUmZBSSIPB2B z0GqqZYU;F5>viCx&kr((42b!}n{r;O_RdJ$vArM56m>YIA;q{gwHW)YOpt0gD~n?2 z+o*N@N-p8@Xe1@{k>%ZYd$(p0@55pej6a1%Rp)+_Z5TWQ(X+cj`I(iqurLNKuaW$5 z%v! zI1bb&zul+60M?o6k^lC1tY5Xp8gHfRtYkPV@J=e9G(Eb zX#|+89MEUDqaGOPmVx>G)}G%tidmRKl5hS3-U`JXlD^6R@=QejHn#r@*u4Cg95aU{U(drNIbnbHzCFi^97d?B6}^oPW9JxQ@^(D5)?vO3gRpS0+XcnI&>~3 zwRjggeGnnvwp(>6fQ8_RMa!9&+U2Y5@>9FqId>esql8{0HKE}O#j`Hm0{_(7f9bHV zMP=YdNe{2Xn6;DUW6b1X@}UB*fOlYx54IaBAQ-n~$&sL5nZA9z!gO6-MMg!3T9)-Y zp9+2DRt&GD)h^NS$BLQfIG=ZujUezgg_}_p&ATtWKT+)@J%|8{RR^D@!yTGB^H{k? zZ1j8tDDV+CB?^(IdcayGF?#d?F5IBqkHTMdcIMT1amC(Lv$c>&7~6MT!q~Nxu(Xv? zQ>%x6Xp-rY3?gh7TMH#*yGv!;Dh$(=Bf4%ujYZPzU8B}HZk`5 z)Qjyey8RzGxr$H)iZ`%xWui2NE}dN*!x5xL!|&=VfY!=lq~jhLRH?wSALH7S%4q^1 zo4&O5LmH$;@lY>lAVQ=Z;T}NQJdzpSe52W1b`d-TER(VojkQYTeW#K}{fO=6j*}G= z4GHA*944zGShuC80iuGy^Cg2;4z)Ez)#3O=VV&p5R{0s!qpK~XmEHmy;=iy6ka+g| zUL(6F=-72?p&6ZxsL95uqD_-gtw$0-0QRmGm)jrlLgi^wX zoHD?+o(`3z=QZA6a}Fgv9D79f{(Svx={}F%t(M5(fFp4Y!BOYU_ti~t;@;N{kVWIe zvpK2jJaCz&&~k*55QK@ZYYzinAQ>rWzYUWQf|IM}yJW{7onNGey!QhaLv{k56lDst zL*aOz>;)me&`TpId`8LnzO-j?e4)XjOe1rW2D<`|n!-~rKlb|WDH!V_1iLyz4llrQ zUBeRk`u0%~xLLO9G)WocL+<58=6*P4OPUb7$it@a7fmD zOdcF+p`S5pw83#BHY(E!T80;z*278tKz41JvtUodAGi7|IY0vg)f~odX1Ff}0xb=( z5S5ZzHWbSWBO%kSB*I*frl*NC)se0muGM}&8gHT|8@JmqE%0ldpWlN%8p(>;>_}@a z{=U6KQMkGVdO!e^59*_zaW(t&IApez!&&mVU%5z`NvK`Id+L0$bZh}k)q&MURWneb zk(r?;@=IQoB!jewvxrqW);C50u1}{s1imZMuz_bDc0f`FUWY1Iw3W7|cyzT8IXg}? 
z7h#Jz`%_iR6-XrAN(6{&)T&k%&hMANh(^kQWqJ=M>0=$VAIWuQNZiqMRqN*5sb&q2sBz*+LCPb{?n8Atd-4r{c1;t-l zN0t3VC(sFV&4kGWk;3O})i4R|@dxh8q*Go@CY`l%wsqTC;(b85nna`-V zy=|Ibog5p}A;Jayp|gmvtFg8*(o|NCX~zq9;GDVg_1RX_a7!K<$|0t@p#{pD-vwE7 zri9vS#P@8C*ebcb%52Qf(Q==c-%r}$PSDM#gzt!AF(ZLGaN74e=vOKhe=|u`zVeA< z5zW`OSBbgkDy%0^yVKmXbJzae z*bDk|=}_cv+7?t{ky_F5b(-;>XW0FnrO2Oe3W&xbWV4(Te;^KuxlqdYgxWf>`Zc3>D4Ko!?^(ribB!c2AJP8Y| zi4xEgo68sD64%I8*ie!4Ilibq>W8K#WpGb&r~2=A5}9*yY?YCW@pZ4cM`6koUQ4J> zDNc?qUXtoy?8Nlk(pGYSs`DMOqSHL3gKK#gDXH4Avovb0AQ|Y%F%g8b&8%phvQ7ij z+zDcaoqeSLJ)4eigF5K?h(GQ{d(<7ohn**v|ry$LW7l`g?HmyoHD83c(HJ#}ALWxFO52^}H zr752>bZT1r&}nI_T+^Vi#4JbkSh{5yC6e8>fM_Tlh-7YeO}(&p{a4l^c8&ZL-SLZq z^R(aLQacTrHh?vCqtbxPD1lty}JPfMnUu?ZeqpiMDlJZiq8eg!0?> zQ^5=I9X}O+BCBz7(E~vWtXZ`z5|ghkjg%aR5HRF*EF9M5K$A`Vpdo}S*e>m*R1A0D zB(_{)MgTzunZq9eQC;r^>f22ctPl*Xrj3}|a$lqvIKYoU=%c1mnAcakZ~AiEi1;rF zLE80S>J9rFhyY4@^iDZfVDj~Zu3azgAg(dg?gxpb#4p$Z>(Y&=wmU6qD=5ukp zuJ|8&Jnx(ww}E`Elpr#s#h+42SbM!$x7%$s(r@+<&8a_I@7Jk)D!*-#anEikl2%h8d74fpQ0olIlqkVhKu(qHgcyvsaU&VU324rm34xmPDM$<1M&i^ zYs#$T;P5Gj{ivDB#ogU7}-TijCAQ>Y!aXy|pUR*W%nfrg{go5>Ut zPt#Ww>)PRgu(qvhOS_?^!|TIeD$0<8jDDHE-j8}{!*DmCK7IM0?zvgq~6kkqp+Ihw>}JZZk&yFy0P;Ykb5T-3_G7`BdwTf zMKwFJt_0dc;Ozi6vNluk4_}^~tb6VgA2$X(L_GwCqm@64`+Ho9?84uKsB2Pf;e;57$*f)!T&ENq8C%F%X1c`CNc0THPKEWCZ{a204*4I1-K{8?|QF;m!NkP;$wAiaI zuq)3&6u)-gBkwGShssV7nnSM+3PCvKP?nyX?1nS};rTjJW7RZc4b-IA%}oL)5xn~4 z*u@7T)A!nhrRlOKtUrvjl-hB0>bzFm1Xpt1Yi(@58S6)Kl|aGzmD-o*g zMHXpAl>SPnv1dwa_@^}NH5D(6yD%rDqX0=#g&$PYE%E_73UaAQ7t^q=t zTE&D1W$iNECMlh|l1it1W6qX~^jiLj+|`oia(DbdYb?VA4hE1(zBJVa@^5hy;v?Y< z-b8JmMIK6~`bk%N*Q%HSSv!Zyq|%y);5kQo(@Domp_p}{oIE9&UD53gJ7pDJYY-%% zfm(?HTRlfVet5Q;PJGP?pc0|FM%Iit3xm zOtH%kdF#mR+#6rmrURc#o{FY2M1UshC+trR6U1ctLpUcXB7VfMK1_thxmEWNQnUA+ z7RdH*Av=hMI?#mj7RkPU)2QeV1%Czwuhhv)X{qdz0$H|)@R-kvrl{S$k~7P7fL}CA zvK}4eRg(9P9Y>8_01nC;1og&ujD4{HvX*xc$79!Zx8oxL@|8MIIG&VLS~tZoWEHNIoUk>MZdec!Ju>cH>jNPuu2yZlZl*vn z#W1c1A61>9HKoZR98hKyI&23CeF^0BKF&Xmyn6 
zL*9Ifj?jUaQglv|Iv~_msNwsPS8Hl?dSM0!){Fd^bT7f`uF--~7GS8pSHpti$#9z- zRXDRwdtaigmKj)P2UAF(_!9J8@4K*D3K-f*F%fTqiukr}U_Md0Fs!BA{PNl-y$_^2 zGAT`mF32nzEYJqaFma8!cd_Lh#ddsS>N&9O$%fHf3!sl(Q)Fk--Hj#6$lrd#YuK}J zvl_3E#B9n4S;6Nmt)cs|@vlhGs5B_&(RUpud&3lax+LFM>1F2I>#E!SHhytC-j1%+ zSzyO#8L0e$Vd>l8N(lDC z54A{M+LwxhAsl3p8U&4UTytMQc6=Wwfzm2t?BC-O!oW7-Q$*B5zwTK<6TQQ)z zr<#7M(?)Q7%$c5C#0BtR2A(_K-|j4JK)&?_I?=QpzWr2WS0WcuOn?cb8gJ+t4I1U^ z{uN*&O17JSsgblNK}fSW619`=>{iCQos{C^AZqGXJe;*%Yw+%~H6Jq(RlITzmecXi z(uheitzPaQK0e8Dc4%rojutdZuGY+ZgaXPif-Kx2k{_`}82EW^}fWaC{{ zVLqs-m5AX>^y+-w?)$B`cVAmvsz!4?y2s+^R92gsxkZ~~w1rT3-CvKjUtA+df4_lN zo*}MQLsmd4Qo)Phmu|IgLvw}Fh{6Hew*zMiE*VyrBN-4b9dp0f+co=qZDdGu$K+sgf1u1zpmV#OfB(i7Xnz#&@iA*M(siq0B^$SNE*)MQoXQDWa zb01q)du1A{deT7vm!}(fQwAt)%2-sw)~^xD(`{H_==AqHnD9ZDs|ge|mgp!d_m zP5ul<@z5pm?0&X3Y9+x-AqvoAs^D;GM%yo0uqCVvsyEy9>jj^xOuJWV>CPe{UV+SE zv<{pMvbBj?`){;n=2=w|2a^bngZr&*)eCb@XA8T#WzAce1}3K$s(H+m!KyNa6&un3 zhQO^h!j7^PjEkJ_-@?dSyvAuIdYg4Pqr>`DD zO}neXwIizGO9nnlXJvA+K>gA~CKvy{gN~}hh0P;_+)Kw%0YF4_BkJkIgUjUYt665q z2M?d~k!Uu~(Hwjd{#C~vElIZ=ccuc)NAss;ngGUECO)d?5_!0$f>EVqbEvSX_G4r# z?1gBXiAlpE z8E?SSPLOk)A6BllfWU*TQS3wQ-3}b6PsXFKuiVj9REwBL4IxI@IMT8)sDzK_%Q$oP z{O7?`%%0!Q4Xt9| z$~!~}eQbZW!)q&$apw^%BcqjT_%QFVAvL4_1*$+*zuUnM1p)3a&&p@cmNHC;u0Jv~ zDQ?e*!)@@KTWUqMUf^o6BIB!-E!)zj{2O|sTw=iA8UmfI!uEFonH7QN2eb^`YvSHQ}yTgwtElA3arH4oGf($&PL zCFd#OO40~kl@tXkbqy_AN=XHJMT&w! zu(-YXB3H9^Y#Ek&tISk5N{+O*5)#|enaNFvR_9)ooraUWhn6&Tx@}&kOr8S@^yLP? 
z^f$S%#%X`F*6K$Z@#TJaxwd*mSzYD2bfzLOnEE1si#Wa(y9)uiI*+hn7DZJ|X1c4C z#jO)0sn*|aqQhaQ8dr%bOMn$#>r!^x!b$0g?(u4pl_o@44pgV5DrzzS95kM2T2*T% z#^FQSY;d@^)!DTQ%M|*BIk%|Ha3)81fa|TM5_BbILM!u1%}bnvG9YlO6Vy;Ni5e(?w|x1n=St z@mk8~wg+ZR%bTE6oTkw!QIuw>#XlkEK~hSMiAYJX2c7S?TO3QsFuTr7l}M*YdtBDz z%GAT4a96`gP$gd&g`|sHdk}FFesuJ7)WT(lT`~%Ex}*T#5g?5`)yti^bKe>oL@m_t zzF|-|-B$NLDweB!PoMugxvlVx==<8;PQz55cMyTR6LJ1b} z5TUs?^#VEhW0eBGDaXYwyoV5m6oTTC7q~)!DOJfNkT*Md+Y#!$qEyjSi#j8Wsp>M- z8I?GXjFkeFCrvZk9OeSsszf9yd%cvx=)#c0lH1ER(@2f$!?=Ln6aOcOKqlH(8|)FokilL zl&f+ymicet-rR{i!BESP>2n&7C8rl6A$8czDYeK{bpkGy+&hANHX_{)65`99M3Gi? zbKOB%JXFSrmoy+e<6K3RC=`O=Pyr!U0_5D-1*k4RaE518RZyy+C%seUx2-V37B!9? zuGm1jg2j!-eQnJ#4s3bsZFVEDPciH zO}-KvD%71ibp;W7Cwt+NoaBcs+}@2f)H(!hF6tW2qq*L|08i7)9cH;2Be5k)sLG1k zRN7|8TWESBlu7W8T2xNZ(hZ8nG(23aZ6&8j-SHv~N`5TeYpYk8X2(1LRNn0VU0V7V133P3>uYK)= zY=um&&&t$VRYr+CDk2JF)eeHfT1YEO7D&>xgLDE&Ndyf;A4%HnB|qdzj(b>Q{Dj$A zJJyqjO2dFgWD5?2K>q+?5(16Rb`@*USb*bH2vKuV=oJW&+I{#Wp}zQ)xJpBcBjFNv zO_O^S8-P`8Q{AVhD#?>RO&X^rbB|VEb$X52VF4z{MfbhQ+h8~6vBiY}%F6SVeYlx& zEQ*C53^Y=nXenw$cU)Gb0!T{20tiU}kWH>Ywh}YSD-{|Vsu`M|7^@nBn`uo*L#dF_ zKqR4`^l%+P}AgH=@k;BxG5`AM0^je!~%H5yU z6%n{`nW#AruHXNjmS~+B%EGcaGo8%fse`lZ&d0vX>q;*)1K2EA_?ox zX^q_HDY_c-BH4(VM7d5jA*ov5BKkEGa1wUq;^*_@D_M!8%xS1`U6^T-R8<`fcu<6? 
zNEcPezn3#{%Mqw7jkM=BZ%mgQMYuX2+5L z09?!q-e&tnF6La}NT}ynFm#4RO;(UwbdC#JE#g?Ul%huCkQ~Q68C51P2@Ox9MuxW1 z-vQPV8@0vPU^XZ~waTyU++j9}mZ3+TDsxp7%vzrz#Uo11j=>(S{IE)`O07qvy&7c2 zu@%m2YC4iUh`!jY=~mj)^I|_dMnC$S+rlb6Q@l>BI3y4UP7ojq>_?pnRYihZOj!|| zrj*%CD2+<7Ab^gy=YPsDzYWTC33TyRV9kPLYEl%COKTSwx7{RLbe?t|%w&30nYn@+ zF%*T#eDI>i-&@x^TcG*y^+D|Y4oaI+msY1#VAHN;H39x3jXJA*8Mq1DkBlxhIH$eu z2n68InBT9BL-9bQ@a^ZDZ{b0zK~u@6)GEwNmnlhlAVP}!yJ^0qT>M*&BTrH;K(+BY zpG*uOMK#Q51zHX~K6p}of} zN^2^|W-?<1yz}gYDGNf&YGr)W!y@NbGrrqmr1wYb+pgPR_k00M)3)Qz%5FEn>3Y{L z<@gAnHU)Z~(^8v2Qah5hHkQEn{u>+K<8jl=95cK(Do~n!msDj@BB3$XlNqwKyi^Lg zcy~&9gRut_18IdZRR$_m7p6M}Xgp@A3O%6V@ix>w3FHnwa;$vlqN>ctllr6s`;?$ozXkqJ(@ zm)k_iim5glbu1_0m`W9>D@og#<+Z`}Vs%=cBSfLoDrTnJO`*g_Gi|J-f}*3j(mWv{ zBXPIQ(;%*`8!*Xz=nN%?UMYaJ3H-Zcjb8Hr#tgE?bPefTUoLvBi4} zW_!$vZm9NwPnQ|Q!Jx`i?}SMUd0UEU)L=$kUk3VlLYuK>eoBp}(q&wIR2`wxFPtD1*=_TzZfC50(Zfe*VqhVIDPy&h7}%p-77X zx2sawl_}JQ!Gzguv?Uu_$|RF=Pfwc>Cp22jH3o{cXl6L&&-h$MmqL}p?%Apo*#c^( z%ZB~D=2QOwB>gR8G3HzTuUVLRMExw|F(#wyr#1A| zVZ=-By5Aj2ZN4m&rNoqm%9>FtK>v^JlLu(upSxNablQ(~V5x!9asoC=)gx#k?G`;|tSiC20hGN&=eCPzz2 zQnjg-@d_4xC{jTLSlFA3VOG0KhOEU)u0Kv@0|QG@TXp!Zsm(V?C<{*%Sa__SR=x3H z)+fqcU6!IpOUs1eaY0J8?u8C#)J?sgN+X$hQ8PUn7lm4wncv+Zp<8GW#}CmE7Le7#Qet<9c9mYx%wTnt>Kn zXwhplnk-tLUX>b*gtY>uf)w+BI98q^)E!Z}d}vaQ$@Sq|Bgk@8`0KCc58Q4u#bTa> zrJ}~9_(JTGv3*w5cCq#xJR!Dglelc5#*r;fq^e_UjQdI}O2JcZBr569v2n72=WF3d zBGZ)aZF$MrJ2A64!r+l5_{A|0ligFBi`iCMl|ynqXz>W0G1!EvLLETe2T*4mTVRj;%_$NH+=6lYWBt!Y+30OFKI` z#jxl*xAafl`^bm>eY%|VI|NThSvZ({l$v36>bIh z<^Z_md{xj@iVl# zR0bdl>{yc{YD(4#X~xuE0SQ8h4ZnENF$Rv4nG06VvK2M1{vr(5)X>7%E zA=1mMVIguAdn<^v0F{BKNLU+-+V;R|{SPrmtG70}Nkh50}Ta@6>E_xKpKnjPf=!VZ4ih%=vgyHF}2>q_jjP#t6jVDoLA@ z0`X%}vXrW&ABMR|jHZJUgDfcNa5RLVDLkw#bG7a^2gnMg=K4&yfG~;C;npD{F-uLQ zZpGp0NNp(H1vXNyK25reZrb06?{hHC2$K7ARSD{BfvHMyEt~NO=IYBp-qt5h#@I`D zB~@cgn*+bWW@wc8Vw~&NamJGB^eV(?9smdmAlrL#zOQO&mB9|++Qi5ygOUZst|WNV z0$WK*H-Q)BX`f0VTzacip9?MIxpfwrrZ>RvZA&bz;H9ZWO3Uza!PV$0R)m!jowwvUA65-y{HaHe1u`YS-z7i7aixaBnNpIwr^2s} 
z5=HDh`3`u}ubLMxG{LC9QI8@VX=;dwkl;9m*>{4Z-sD|YHx@{>j@r56eIsq5pn?dA zGIMi(%=n&E)N6F^f>60u_CIc5etHm0D!yK(#tcY)p3Ky}p%NV{@i0lTDOyc|0N&~r zu<9>^EJqZEGJFWGEQexFQx69~5>LSt6b)O87nDHdd!3HsbNg*Urw7|{5@M*hR}~>6 zN|f0R+$A7*P0iJF&fZuN% z?B>`7@a&Vc?>ldc;0VbkD~%(giB6=GjpqEPHs0IjpYwc*bssGET6H`_Qcw_t#(7Dw zy~$O%o8cmjSd$L4y3E+KYI0VR;?jj|&45u({z=^0_S+1zBPLZS@f&qKX2*#{r~~9L zfwT%eQBmYbr#%q{O9@=C*d+=I7XhMfMx@x0%I9slB`>zyWD#@vw}f=fJ5I`-cE%%l zfGw?<7GZ<`Dv7Ma-mwNx4#`DXiN9WEdrLsp|`B zLxSz4BYzqPF=A{11mftSMRpx3qUKl2?Lu}!bvqVR(&2d^>Iq6pKu9L{3b`jpBwKn* zvsYI$FiC!LQxrSFb~+kSLRG_Xl@DJuUi|IzV)>s?d8g4ODQSd9TB4^*vdK9`81-wnKZn-vrK&IAZRHkWcEJ-cv5#CrsXh;@2TI6&5 z<9thYt9KN5jyCIi19FupT!DS8dF*d{9+=9Tj>o9H3S91zV~2uL*d+w>7w9i-t!<7F zGYwhdqZNxh`T`SuVIjW=1xin@*RUU>0eTa}H?9(;>6Hnw7N<8Q$*iF;;H5>krJ@R- zjOrUyQD9V{S9_=hUrIqI70q`l<;YW>jxzk1(wI|fYEs`@u0Dlz?8EYxd@OH;ZP+m8zE^V`1jcS##^!_*5~>{e8wcaw^EqQIF+ao zsMv3?=g4Bd%13@x#vl%0^|?2($uVhzN!KkZ;UwJkkH-5)n2MJ|mZltf8XgWRGLnHH zpn#&JTzF41I(ZRexWKOKCWhnB%%w_`CK_3BauSV6Z9&eeFe3ZH^q^ zE)WEy^Q|ooxM-OvNR#h;;?cfpGYFRlv)51O#uQM%z-8q=<0`e9} z_&^5jgr?%=-dp0Zhc=law)SKxnu^0w6dr5OVl0kVKN{HeX&hcPnwLX? 
zN^;azhbl2jQu}dTLX;4;98RT>O|^^M5y)+Zj>QxG4Qa&h@Q?oh9IRCE>Yk2bEknLK z{1rG`LYzj2kl?lTTH@BXhtD_En8a&mE5ki zI@1)%?#77g%5`zv97yb_c(rKQkX3VHppjy6#)neoozSEh{969^u9c)Zq8kFDc7GbY z#c@wUYN-N|eKH%WshgFAABYfvzN=YW>^Jhk_rr;^926@<4!FNAdglqP6aKu0v9AwXJ`+_ zrbJ?{^M#*;Ex52gI&`R~Zl?PUx4&E>RBDKNs@*!H8hkkkaRr3Rbu1kvLgWoaCimQY zVSTO77B^@V)@rfo&|lONxnzYT*rbFKl@DRDx2p(s47XTQ&(5jFfT_iR&=Kkd4YmkM zloFLRb+6)po2O9uIH8?Vbn;VWbg}7t{_txPv^zbg^#RcUnSf8MCkA)MBwoCjrm0gY z^{l*!P}bB!+uRrnloN6_Elmq8)Go<7&`C;?3Aj-qo3m!!ZVcMe7@F(V30zlKF`n-1 zpa$zpNBA*vpp>3q3!U$T{JA2q^R)W(huUeSfTbmbIF+R;O^GfqVyhF&=CByC^32y{ zyDBnNw_4H^lIx*rN#sVax*WZnv5BwMIN>1OM9DWjan#$DT+@#!z5*;Qk+)2c4T;3k z-f_)YBvfM)SBSdYIeHsqqI=Ui?uP=CYlH--sFSb)_~|;JxixB?F+o&_@xx0)(a3ql zx8NaeAg0L$M5lY)9gi!Mue9>(QeaB&GhhyWBjs}CfRsFe zN6j(UHl)<3k4MoJ4luBVsZEa=QWTN^D)9hC!78=28|)>9k)oFmr#ZmQ#zlx3ryj>^ zP^9V>xFR+N2r?o_f`8axr+Q!QMjb0Uy%IZ)O}_=!;;D^}jnuW+B#up=4aUG+f~-e% z6>Os}pHZgMA~G0aL|C!fZKm3gg*HmAQc8*ff=|P4RllArS@CkNMu!|fE^>mBQkHCA zkmwtzfPrwHT8)6^b9-X1%$drCS*gmWWNJ%Gc3`v5r34jRgQW@Bl@fZ{Ao=6XA{uF+ z84{b{a!JYLWZpOBUH8G1ujPO-G5NWT`R$5d%5>R^cCD9cPSfMdsl6GuZX&{0L2d3BqCYK@DEkc79M8(N@4!0!05BL;rl2+K%(ExyKEpSII z@LqvJ%W8Bz>FON0(g-g@i!~{XKpqr^(60~{`=D>V#wN*wO_$mpFyS z={`665n~6y7m;1d@hY<-KIBuvWg&*Rfh?g!4Mi&_!UFXRf`2CS=B{9A5f>GyGul&6 zw6L9D9Rw?KPNE35$Cbz!3E7@muO>pJjK50E6BODaNo7(J+bl;?fl`u9!C1Adn;Rau z<(_gx?5$Cg8O5orzd7YD8bkV)(H1sJ1;7H~H#Q_B6X%4X?(a^gTRlGaCmdL}rC6Ox zH4Tm+0#745KmyyGk#c;1CK-z+q9ed*lUHrV-tLQ0N*AemTqCIo8kAC=pzY8F?Vg6I zxrPN_GFG8dm#GoZ+8bNPP|{ML4(DYJMB3LD2OT>kq$+P~lt_sR?lid1&D5>R!5R`z zmx;r-_C0XdI++@yKCRv)$9efqs$!D(DT>Du<&-IUf{K*TY-vgqJU-O{EAJ2%vm4u& zI3@=(IIi^CbS-2hUoWZf+a9*4k|0x{2B^{kT5ZJLQ&#Sga*}Q@wYppM;btucO%9X5 zTaN;eN{n}WD?ui}b2>)$Pv#)=HF! 
z?Pb`}WdeX2QCd_ALIGLWg>DCzsCzNsZI<{O?>Ne!WDrjYQ6lmpAjkm7DYjDcu4sam z4m`K(ZvjbFih?9dZ!AZZCM`|oP^julW4olOI*^mTu1A;~9){P)8W(02RnwDMsh%vC zTjK`Su%)HGic(6o1fMW1ZSYb{ZwiA=GNgjZEw<=sCE$T_5*)R{ZMU;-rxrF}0y(ZCV<^wItc_EOUk#GsUu67*y{{X8N4LYNj>XBo(`^_SIIxZf#B&33Ges=(m zVZ!EMp|HA3a%sF+;VTH?gAxej3wp?acYSZW+FOdsYNzZt0Y|t`C36IZi3x0GO)`Ni&Tf)3Pi<*z)gury~^+E z1L?L4M5jTem~*L+V?}5zB}rw&#R*p>F8AN_ZO4RAuH_~j^w9gKI@IjP+FE&TUL=}l5KQ=Nc9XoM2eLnn6$}L-PEbo zDJ!yFQnv9V4eWVet{%1bs+>mT{!L`*SyOF<707>JpNv?bSGX-)Jn707`cnLT}UobAMmW zibJ$4!8&efkcJkfol3ZBC|%0Dw;s}P#uSzuJ>dHT9bnl10NJBZ($qLX`^5Q+k1FXX zw(ekkG8FsMT=DS~cH{_6fYKbsvR&x?aMQNbFBXWl-4sDdXKKDYw4@o-0V-i zI zlYDgW6p^A1xLhZdxy8izUWL~3{T7%H{Dhn+vmpAcI3dV zvfPQy)E6y<5TrH{!Afj)0NY8w(}rVWHz(JKaNxm!Tri@v#}M6oG=Y36#uTSQ!T0>m zFO90yzC&dP-@Nspwbz_xc$A```%VU1N;X#scJTrCV_mL1f$A|Xj9i79R?FZay1N2R zfY}ln2}6V0q!!2@{WY9HuhnHsesv<7M@307+O*yy{2&8~*$CL`QBM5_E&QvhZAo={ zJvOHXZ9;Zw>D@FB9&h}E~o=1Dm1WRb$)w~ynOfZp|(&YgN^3x!iZ z-R@-UEG#`1`$~t}4tvd$n}L?}`8m-0&|gc5|Q8SM*yn zsMLv2z8&Y#8gbmRZr6p&Ji>0>YNh!Qdt|BN4o`R>mwWfTkwN><_smYEQr&%8m5Oo4= zYiqf%+>NpBXQ{_`C`rrGXw5R?j`zBI0R?GXw$Bk|2~a%8FjRGgs@69ug-c#urPFG4 zCDk0ORq49J%{HDA(uAu?Db%Z5Qpnb!atYgFF$S9MV9jtFt4pFGxD3eSQ{Pj|N=v%{ zK)5eh*p*!%3-9+hhhL^)mjmpHGv-GkO}4=Dy+k{{o#vxbO&7k?(+;Zp zk!3oB)fSsJ;{ZC6G%c_TdX%LEfNXVb*B3@mmb5`~n?6b(mZ5PS)TIteWi}y7Xdeoc z2I<>-n_9yN@+mbov;JI&6HgOaaj7*{;4bXC5&d}F1tDPAX(WXuLe?X8b<*lQ2dPP? 
zgCTN@>8fi;)8o_@e(zWtTk_i+I-{szO{Bp;-%skJ4V1bZIUe&L*OeZ2kqSeszL@Ss zkEu#rOK3?^w~I*lq=COH*k1|kLT;syY49gJ>$OTGxs+5VP1hj^QkzbqNU>9BWx}J+ zr^dyARjR>C>4kcoB6Li_BDG7J&|+qXB*k?G;zAG*mz!1WE`+5jNdz4UI~(GCE|o}Y zyHpZ=It$(g)N0Rql@)JmpCKnUrOHlGs8Xp;70G@paY~+4c&mMv9#BcrkAu{uBbhcO zMBBnPygI*UvMO{1)!@4bA@ms&7*x3&5_#)K6(LQm@2D!=h78u9OliV&sY&3r(ypgp zQa%$-!(FaCL#XHtu%g~?%Y}$?QkCH(SQ4RgeTOoAMK3Ti+)AXZ$D}%nYs|Qoce3>;l-%?H zkIDwv`o7T8c6OYl0dkXU&FAI3TZqJi^zUk8CE_Il<885j?|&LN{Yh#~KA_A=EkKOG zh=Z&t)E3jBI?8!2z#Z++khQQuhXa!1!I=8lZd{HzC01Hcs{jRsvCau_DVQB#&XD0vfbIt{J1*voaz z7)J|4UO-X$vPtE6oM)9!#OlU%4Q*&$B z{!dmIX0Buh+D$qlmbZW|0~R_Maq;C1+Go*Y%wW0zJWg~E>uUF zaYeGqTeY>Q5&#z*u0A&_V@%OmQ?$3K&ATS4Dd!g3af2yZhJ)ftQjv97dvRM)b2Qps zW{%^O_nT%D$#O$aH{JgLgE!Np?vgFkY;m5@Mx~IXjjRQv7&th+r_55od1@i?3I;vQ z_)hs0TQ6mL)GX?$m0EPzZ59ZOr&37Yb8jLEIH{1g6DqdYi556F7q^t}TDo0&^r=!(CVY7jvA%F*P|VFTqW)uUMIkDE_^5Cx z0UDB|tXWA{m_MGxZ;J+lnPW<)!+ECG+s^6^AOY|zNF_--i&%?t#c`J>7wL~dW~`cI z;`nsDwQ1ra*3+uu+wW|4aZk(4W^E!A~T1AG4fSEBj9IU+~ySPz>ZV0zuOXPx+fqx9 zK9wkIQ6wQLNF_=(Cct#w+hdg;lI=#V8bpADHQOb$Hk722I*sk9`QqB7IS!f;mMV1d zHZ+?$<6**urO-5>g^jP`Alx0gTw~Z$%nOrN@U6qbo0D5HOY{tw@oF$cm82j=+W3vh02caegITKr9|c#PZtz07U%VnVFAYZaY#7 zUCp#KT40A8X`l~;sM$(T)Kq>o{alAy#K)aOG2=vW(zG2Vn-jWrPy@@SOf3+>BpCCz z>(hGShEA14jGOZrnHvu+>2h0P*|M4qw6>!*qQvmR#0x3#%o z!#&+IW|*NNlGI|2KNO7wi+gW`Idrt90f8gGCQ3?`PE2_2X(&=sQW^m1XsJO;C0xRX zL5C_MPewzrm_m)n)9w#ldZ zH|A009e?MwjK*Z)ZrpPr^Nzj0>$Qx=PNVClHT2c_?s~Y&H64Mya-sZsv1Id3*sZ{pfOJ0Cn@w=3^_XnFbL(tJ25pp|U6%*l>Rk5HXXb>sv|FXVk{@oi0XNsz`85juN(|HY~CMyHXM4 zw5SXJ0AvoM-U`@ zjlK*Mu-Fm3tU$qZQ)D|SFHVxT6yC_v;Rrw+f>Hnj+HOy?1e<9s3pN1r_4@F`#9RxU zd$*1~ne9uEQh$%vpO4r^UZO;%sdD%rOL$ppCiVw%2SIbWw=>TMEIEtg#T5>x*^f0` zdiVVp=|_o0QJP9b04oU>-uJ*<;VxVpy)IM)YIzA8=qu2YKvJ}hdPUF8g3BXo8{!>h zGTo(0MLoo|Elj8|v&k#A$LY2_DdM!nF$5(*ks_1pG^(6cq%k$>#VNEROvYV(wL0C% zDPDxw=pyFCTi7t2nj^=JoHE18rzy16DoJ6Km4tviA!Lhm)nV-GZQIQHGIJ`c57O&2 z`h=F$)UG6bEQP5BE3(oGohJSfxEmYWVyD`IrMEk?l~j0$eG^>OU;@%E*c%VJwYhca zk4PQSWn<3cKTt4k3=ICGMZ`STohv^{wl_EVf7I~A5&$AVGhWZ3^PkEPoY=rTH 
zehp)ALWk$&wYlP|3wY-F4E7z#AWL8%#ptfW%P!{WI({gtzO}(E^Bhof*9w9@PLFb%L{z{s$4!K6pvs;@@4@*>ejTaQ@Q(A4r8i7$r*c6W;Z(XRY;g!N4 zsYa|uRGINYnq^A}B`XI(Hz3&g5D4{RxLq1uVr+J1NL4{{w6>R2>Pb!2ZM=r}*ng$A zc1xqhqCEzdh|QSDB}r|v3Qhhtqq_I!j`a<&rKUsyXc&uF#sPuLp{!k6np>c(-2Mz_ zl%Ebj)MY&p=}gJ*hD?`&M1bP0U=nT(`CE|ok-j)oGb3>RJxH|hK^A581(X7IAlwiK zTlG5(2%ylE)g{`FWSsn#E}aqM_=UBBbI8~pV06GP(J0K)=d!rurBXGhG#7gSZE#NZ zK1Y!UbI%R!4PKvI=W?X~0C|f6jj7FRQl~$}NFsjLK3i1rn&~w*>Xdfpxj=qXO(BY- znu=BswW&?4qU%amsV3kXl#N?*g_$!HYJ{j1xNpg=E({RE^$r=;EkLN=#A+!>P)I7? z$r}tM$zdE)kQ4_oalO5GDM?ZVEyz))qMazb98=0FKyd(Uok`zgy}56Q*2-8t;s@wj zCliWzC1>U}#$81BF6_&SLPo=>1LyVNQ}P0$p``+)gYl1C5UiN<>&Y!SX4X)Jy{AtR zKmeuN)^2`%@IsMDk1+;Ea3O5Jp|*_x#Da8b&Ni@YMBuS{W zR;h6!mBCvNufo%4Tgkr>J6itRo>#<7&pM)-wJs?kG2MF7)YE5BvPtl)gJKP~KEEs) ziA&3KxX>NTvFOsyS< za5~^|g|P3>WAA=c)M|K}93H;+p*1=+RhY_o*W489lf~sMq%CIV_a@+#a@&zTe4=N` zaiPCZq8fl^Vp7VX$a$?nrgV}SBKJy&P&eG{FV6Uz9;-cv)fNQR{37^-OKlDz0@uEh zIqai;f13-=AVR3e%u?Ppi2d+ACDk`)Zf{|vn;q;2m4MrxD|xhPmHsgTe@4PiH;uLx zjNfQ#5veG6f?~i;$+)~iNsLS$RL3d#&Uz}Ks*+;S+KN+AMAlT5NFWeYQncM^DbfJ; z?a;AgV+DCep7kxIH3h)pA8Y`C(JeS*UXb*j=OWx3I3A=2x~<1nSHIFOJIrCeUh z7XXi6Gls6uB=CDZ%gmrg06SyF(ZAR8ea+yN2Yma%fhM9=~k5}uHT5v^~`xw zYF4Jyg|>95BoKPv3%#SyZS+En{8cTvD_S0IGX=%Gs{#lbwgYl(MY;607+Q{GLui_W zc#cK@vlM#pI z#+Ie(<5dBB51R!QgdSkvT~=hIOJ<)4G$cBOIj!P!r3tV* z4awgGmCDC&p95=Wfz4!aw)!q?Iol6+O758>zTbEw8L_-h z`{twU-BR@}5WJ3rPm#Yd)-6lYYl(6n8Y@n`rR)-x{6$;aRr-4HA|+~EDw_fu%arV+ zs$6$g6qbhHasbh9KbYSGqrWJ|<02vxPyt*RiE!z9*0BQjJk7=a=cf3bDw_L~BF0Tc zh?QqjRWdtmoh2adQc1WrzdenOy>qjxXG*T)hUdfXIa+7J{HS9{TS}G97r0D3&?*bH)RG7wU#L49docGBy$+`m%(B}qIFgwGJkMJV>;?R=z1jN_CDA2G zk4!|F>;eRNO5!M}6<83iM`WG6&ycWg(c6ueCDZEEvYKS)Tu^=+X&g169(oTUYXfbs z%ETN4LNqEm~oD$)1hi{DMO0|dPg9jc^eV_ymsL3-c%wLgU;PO{i{XBJEU=-T=e%n zX&JuF!zy)BCf$?(ONwsRP4+u;>$k51YS}TU=|jw{sfP}bR#KlYZLy+1ld1R+m@!yW{!Dda~pZI0OWE-gy~)P8E#^vX`3y>&4^ z$hU-^c^IHsTeIUkGB(XkcZ7!4`qI(5ozM@#k6E$$I^o*4mnrI`w^gJ^R0ktH!r(4a zxNdlFeOp@J`nD^$b(!Y1-XX1QG=+uy8noF&?aQss9Q`+2tjPV?^KTgL4K2E`l`TTx 
zgo|6(>)7G?ts||t7t8EMN53kJ6cLNjx@|y2dDSIqcoa5}+7qp$__>Q4kZeb*o(!{5 zMED{qol_wXha=n&q$qBcC!MTzvA8%$mzY&as4WvBA~}$dQ>kN4pbgI5Y;XofXMVcu z)JL5BcyL%RzNn}Pb#}1Og?-4m-2S_dQ>iQt`viOY*KII$Dh&aev!_<0L6GFCy+Lox zb)e8vl9on*Z=?(PpIJYX3G*e>YPD3Q))gg6C46OWn**>(_V2K_hY8W3LUJ@p>@g%F zT9lPFCBoy0QWQspXPS1w=2>=Akl0E?WzA~%K&?ms4;pQK?nTFuIPB`qul$F@5fNyC zVI3`AxurT~aCy%;yz}+FFgr~^bWM)vW@3N?S-MsSLOo*p{(O4GHfH6}`_Q7PrOfWe zE6*-6P^1xm5>3>iZ>MmPZl{+xbDe2?83mW0Z6qa0BwSz184jl4)LfGxhR~-ACNzT5 zloW2Ht7C9TxFd1-vvp2u3JMu9e-A<+-qQzaTJ-uwI2;N2hWGpJIf%fad7m#g33YWQ ztR*YjOq#`ea|pQw+i{&GO*1W zp&{im)j~{18wDr86r>cAl#y^q>PhRi8Ys>^7qhIq!WNbiW0lEwk~{z_80Zy zj?L6+Spj;RksO5c#@vp1*NN{IY_1cr<<`{imFlm@M0$K<4r{66Xj<&qV0>Z%@4897 z(t3f$o@d zTHpFN7>QVI_g*rLXT$LVRE9r=Q;m{Bm1OQhNwKgd$CfsA4KRlb$rm{>6LWqLVhMq3 z5hRJ4&+MK(R-_jeLIwVybM%6tCjS8KH%W+$*KC^cc9!6KSd!bCT?>$>mM^%vlmS1O zTG;iGuCZJG>0o<5ayj$_i-1wZ;ag?z`@OKm4vN?SZ78=o?H0qemD zYGs$)NOTfF@DeUAFJp*}zUwCFHYdzrTbX|^zUtk3FH#(FYARBd39?Bia(2dA`#Da& zTya*vn2=%(fDyIM=9nDeOaVng#r`-r^*Xs)_VPFia?V*d{{U-LZ~O~?s~B?5SmZL@ zAAP^vP5!KE(x}P0P0D_Zc%rYlN#E6gr!3`=cFVM}?23{4G3d*gaBe76@NLnXV4{oZ zQTvsB81$uE{2GtVfZq2q@Hg}{V0}SD^y3a)pNE-yyg}zN%SpIhf6lD07*B&Skt&@P`F4DCB0K&HZ{7e0&G*(}w zld|g9{`^b*rV1)-IxJ}L?u>ZJuS#B zl%dvSxXsjUb94|-#ZKqT9DM;z0^#bw{{Re1{iZQgDsV{ndd2+xFDHNU@xs!l9OF@+ zap!@b)hV`6)bI$Oew{H(kM>QFpOfBRrc~DWwVYL6K~T3X_2N{H&oYC~%6)Jp@uauH zb;Qe3R;2au4aLVYH$6$gVMC{l`^9JcZzt{JQ$vIW#H!H$01HU_cnfJeQ^M3RXcOI; z{W-Z<@o^hNaOVfnzqbNB7C!*N+uPjy4nP zL)lVZI#ZlUgPM7AQNt9YJF-lw_(HR}`~6?59x9O0AJZDo&p7_9P8_Y2YIQ>$z?qhaB;B;>}^ zjj2rbPm}muAgbC@Icc3^^y$Clz^dP7S#p@eGa3rnxXf8#-w7#-&B+yqOCuBxVO_g@UOM4YQ4b^lUxnS4Fepc^|JV9aJM&rJpE~*)8 zy^Nez$DSIXE?e5>cvu8r^!pEnQFJi%Gl!HAc6O5Pn=VW19?+PZ*pqTk!#)nX-e<=i z8q~YNL`?9XJ#FW_xObpT9-@ShU{wuBwX5;REykz6P)w(HvMN1ID74q^F7ajiBj>FBRA$-knlPuJ6(v>yF4((9 z3a#O4N3V^yg_g)}!0LFFUNasRhV-i4Ub(E*5#u6t-DGa;pz)a8yDeuX;KySHTE+NX zt{vMKb>Yr$4IO+xC|Lh>EFnp63P*P9E;+phnDDlNG7fNo`V(wPB;DJ_#Xe$dpW$Le z(Ho>Cs`3&%-hAQ!VuKTwTvSu7Nki1Vi(EpniDjp|;47%xn|Wdxgx`EAZacxSR_kuP 
zB?}+qBzg4+MI!cGtKYB*iA%kc)cfg~W6jtnzYLAFlmNTA`hD?uO(vad3~uR0MBS#G zue^m^9>dR1GaX8mMZX<3UmKKDMsw}FzJ)BWIcY{JpBK)y6z<^#{U(4#2R{~jC0no4 zk47xV9;w{#;$1Y7>qW(c#WX5>$ysUQQhQ7uu2yjqqX8VVZ4#Z6gyxl2L1{X$)6+ND zRhV(Vs4e2lQ8&SX7rfWSHILs_E9Ak~+bD;51MS{QNKBkt1X-GrNhOg`Z#vggpAf&6 z*`e7TDM7W-q?kn-z5oliaxj1|nJPptOL8EF=ueDVT!7A}0k-I zzrPuu`_#HLNAWdVcGOlT{R4i*_8z?vHB6PWkrcVOHHeyuMQ60;*{H<+7v$$U9Q3iSJ4y(=O&0g1 zHzC*rtnij6ac*`M@asy$1tlczk6|kzESeI+jGz<17fF;i{kdXdYRGi(#7C8M-{VRT zA*=ff3_JBIa(?NIA-(P*Z?0JJ%a!R09- zXY!5UWB=3p-aE!#@vezxLsr?^y)CAUjUd>Df)OgA6WVb}y}q1bavb33G8f`^ssJ=C zAAVaXQaj^9C$~q2I7G*;zOU_!w@aqB`>W-&n8Qy6GGI`3&rU4WE%SA+fnR1`BXn_p cZa7stC-Ca+9}N=gbc3JNM3#wS!jdTI&^S~gmGCMFgZ z7OE%gAa-UDBQp#0e?TxEtK#9}k>cZ%G6N}q%>TdB!&d+WJ_ZsK6AObKfJuRYMS<}! z0AK!sN{6{Erh@h(${DYEP3Mqvoc@(IO z6DkjSBA{)U`aQe_fbl6XpwuLhI@4AoSb?}9@mt)yVesJ0ho~?zF2kE_(D^H-$ie!O zVE{ZjW@P6}kFV3y!rExa47(+C?sLHzeT?464T0LZovF#Px?q0V`lQ2J!-HvyEoAW0 z10b;BMp|n?uSkMfNpK;c7|2IxYgv?JnP_;SkT>uf{mbeBQ1DLEaY9PVe+xZtS0jAC zyia_d>6Fa-!A6D&kj^GkfR){I_ol(2Ii^@otDik6Q_bY~5;oOQsj2jN%3Xpqk~$?0gh4C-``HEV zV98O|KZzAbKhg2VEXO-sZp9p-7y1@45mqW=;XI1J2>_Bps%r0Ycz3CT&(5m7l3h+* zuVM-oI9nN_GvAp7shdc?cGdIV(xLkI{SSF?%3QI*$rIvZm4*j^QyhP1K@R5Ll#Ak-#`AwyduM^@v+YTAtJ=XIX;ut za9=NVacE!SW0Y6zF*nPoR~Iv_u*_L;iU{A@Jl z=kVVGK|4fmB_9AR!|J?pPbgIt&rWJyP~t+5WiF~uUXYp8SRia)a75-9yvXOAhp}Cb zIx03biI$VJr4~zY?468riDtfb@j8%AHGbRh^m&c2M3b_nA5nF2e^C+UZ?6bdA3HqO zZJ@JwINd)hvg;GSI6Vq4qDv;iE5^z06ysGnNzwC-nSVHle%Bt6zq|Dxa=xsi`e1u= zD}T(Sr_+*3v$jlh9DC@+2`_Y1BeOaI_xu0$e$9V1?Yw)0@OxyI zhbYVax1isDH7PwpI6Z#BBXO{rY2k9(`)u5xfUp90Be1fV|SS+9` z3kuk^*dKgWoJA=rmRe4N9kT%KzJx>cR-IGhM`PzVjpGQeD2&__DmbFz@ z@a_B-WQ^=Ns*42U9jED&m)ct>lw6fqjmFP0k&d53&$tcsKkGZGqjp6}DTlGFo8J*| ze|VN#wz>?RPO#<~>aHZON{*IJNC}Tugl3D7nyPhtna-m)D+`Otekpwkj7sO=Ky{f; z%zv9si|{~rsB25+NhbO~0QAP?>q4>qIuL*%DU4ph1@|Ww1cD;~b^Fv|oOs3nvuy^L zb;7XrfM56m91%}cP3p=T>JK{(A~BSN3#sqPNwyO#dZmBgu zM5Ir+=%B3#R5{SGkY3Es29?1WDHW+a%u6>gQ@EIrU&Oo81FUi|mJ@)*UHV4|hf?*S~rwIMD(ERU&<%IB}m~)PD(UHhB 
zi82j|2%dpS<2?^Hb2n2s1h?krM3!^?COa$wm9Hu<%N_s9t1~{XRVm!l#%XYnyCM8X zUBzn6H843zno`Gwf+Om0;C4!u!K z8NR)#PO!_xObZ2nYo=(CIgd*Bwh@r%p7?iNu4@g+O$QVE*<#rl2^Cg{D6uHLdH@`kuipAR00`ILb}<`0vi$eqR}5qfCon2i9rAK~ zq7`Yk^Pddc$5KZH&*iR_9{~444}fTAv@>8$BA%DH8w6V=G-LKj~=0LaU8 z)3$EMDWQFx1sCCm+T?Ou1!C!`_1o$YWrf*sF7nVXFg8?*;NS~VsRo*~J(#J23D!XP zWI64a0PajlAYEeGE4#F{%UCO}A*c__O*sMg>m9S9e^hc00PUW2^JiC{LVWtMCl~+u z+=XOzA&e`SsSC+xMUP}jMJP%(y6XY3F8lyE-~UIwJ<*!{_i5;(iR)`YdSL;hy~Ve> z9~(L0p5*bz>nnW#?9Tn=9WOY109>0N-9Ne!(axib;#=R{AdBg2oS40ei&6cqZ0e?F zP=j=0a``2SS4LENS;;Ie}zRPL7-wJpD#7@iO;%N0Gymn=9+vgAc z?nn+IPOwgQai#G}%NIypUi^ksgvo!E;o%U+0@kVj&Z(T)fKCKjquRKfvyC)x45KRuBM5xHQlYVeo(v=!_y7%VH4lgTcBGV zMHRJ3xHMDnF(3Iz3a{}HY=vu+f!WvVw=`b~?-4B?fc}YcbB#N`-4J&3w$32i;Es>} z!^pC%RR;)S5!7yb!4Sklg0%>`iDz$3%>nm$#kcA-kV+crm(x?Hc7wN?l77%~q*M4) z`Yu*`K(IWHOPe-NYq+I%;t=6s4$T-7XXS)Cy0YGVeT{9asMxY2tQUOm??C_FepH0u zt-|<+{!O!n2~5ffYl6Z^?c=rT$Sl5&ET)?-akgSg=6Nk_M%|+y0A@3M?X#agl_5za zDr@F&bka)k+49w0uuoYAegYj}j3J6EJ|?C~5V>>_$PS6e{91_UM%bf{>+uYu z7kbS941+06&AFjTH}vY#J2sZy+d8W75@Wbj{`Oz{51w`6Q7_j(#V;v-mW9`W-c}=0 zMuALUpUl?OeQK_DaMjQXFTLNB|D7jC@m;S|o*Fx~G_ZcXW!>2LID6tHtgu+7fimQn zI$9?4=?{}T&lTezgY+vYVLsOM+N%E;{_TCOMmNPTmfn|!Zl~VuVJnUeop2VpjXHwP z?I&K-G}P=)hz23-->Fh}&jU?%a*AqWExePhibLc>ZciYNN(>jf776|-Q@HSyZah5UZWqH}`rSA;NUf;XWQSVZdSDZw776C457FjE}7p}>bk{mpJ(xSX99pMCvL&X$o zsedY$&E6NrUhN#|174}Yz2tn^e4-}Gncze9vtPw1+$G0!m8 za=fm#r$;jv3aKp-;8V!2F5@8YmYgxOJu12<&m@D2?E+lC+mGm@H0S7;=aparj33r& zf4w=cIOK+xElJ5t|EU=$kh%bYLcCSNJVYYyN1TTI!wm0>{gVIA;L1YX^|j6C7RKS- zN=>XRtnPB)lQ4Ce6$2H)D4Wx>>dc>-N-PnDZ8e@bzN(=W#W`|!icSm=9+$!J2ugS9 zTjPlgbx5m;Wv@C9#b=ROvRL(TbT)gM_t7Xj;?Ye)#~TSQ6Re&TC4>*#ofh(oOP7Ng zlfGJitcC1(STMKu7J~A^q|33HZ7k1=x&8gTAQH?X%5r>kX(#_WdL;J{w?cRCZ->rm zYP=F>^Pe>^v*|qk`1~eY`>yEGE=O{8CP5=gcT2n3zcyua21#svD`?P*x%z9|%Jn}1tepvDl-9)OvnemDie zlP*7xkbVJFIkI?pHTm?&EmZtc?Qb8F;4y=WGEX-p{NMM~o1f&T;a{aZYv~~elpgCh za}~4%i+MRbVWJpGO)9RJyGB&?)Lx9X8DnBRbQSM)ysRc`SxXJX;`UOMHK4*H?Cn}- 
zV-Dx}BuTu3rC1s0910~<->*_rMA|o^oIfLnr*LQ`3fV^YCO?;ygO{njr*pQB?0AW(taeB4p zD&Y7hN3LBOh&X0(46(_&zXJ469Lpewyezp)(eqtTG8kDj3V z8+(!~2vj3ZF?O=MLw!>%6FjA_(W^6ns}?$l`gVjJuUN3`diuND8sgES_{oI`qGv(s z5g~t!TfgW9btK}3`Dr-dZGvxC2OU!Sjk%|6WaG+XcWtMUoLf;yN2Vv&jE3fxh(2#u zG1Nsk($ha}Kbw&c80B{}_Uv<&CG*j}i)u*hBBsH%Hwhf?`{PJS*fGzS(|Y{5R{77y zvS9Cs1{89Ej(K*!~Bb8S3%oK38d)TegtQq022yX`-F!?U5T3lc!<+)mk>IVoVIsW+Uik!r`kxag|!6{o7?`}`W*P-$fQpRPSthwF=MX3n^Hpbx`qC0@o!spuZ zC>-KIXzJdQXo6+iZBPSMI?J%$3$0$p_JGr>t10Pf0gfRCKVa9pyZnFNDsW5#A4b1c zQLs3@0>`QzRY#qe4A_Zhn1QJ*elcbv@cwocKjD2*3t6Ww*D-E)xceTMXNgk1%Iha1 z?1)z2glYZ<-x4|nmGS@(td1n$pv7X)d&0Q(kNVq~FT{t$_c2lN3}f6#6yS*aOJu5(-zcfLo%EFj)fPb)|EWX z|5%}Shs}a2Mt2C01N+yZ4g+4_)EO?em9cFL@l9e%5hN{~#v0qATRJWh^z4D57U;S?jef(Uk);N-9~VUiuYkP&O}E z^8zBmm0{;DaA7S;Ant%cA|PQ(J0C08Km})l6tb`AgwPSz(Lwwf;f0^j z>1|42Bw6g$ubT-SQ4tz=tJ+gbHV&hMC3ahdC-i`VZW%7C>bM5h)WE%=aFoY-PNN=> z^v<)kz-+x`ilj~DItEIIf5AL%%gV(jF)mGv++N{*+T%VYP%74xN#0+yrvRZB{Jwdn z9b$2?-)9LOES(eTI*zozTPJY)DN@~NbjBWC*M(9dgfPLO-l-d8NqDc(hQ;{my0E)n zUx%O{SO!`!@!>@#TS7xsq8v>m5VuCyBGaZTKC2IdD=)-rG67e{;(-3gzO)uavazFA z1&@_{5o@&;7Q(y81xeMtRLN~Il-coRtHt;8M@E}k+EvnmciKUXaXW8jO+~m4f=rw* z{OG!u_#29VuDHPbl5Ya%I1nyiArIn#Z`QJ$9U#n+ zRw!&w=yO%nu>tX^1~{j+luV{OOIab;jDGO)d>Js1^S@i+3vKvV^sPtZxMW3~eO);A z^F3{)llHKNY$J6$0Vxi-lp1W}$Tt9#K>BJ;WNm7{Y2ILPg|AHetziMq5E7oRdyNgc zlBXxtmS1)4G;kWCtQ$bJ{W@gnRQWP)`-UMI80!kc5nc#CaKzwhN7@CU9TN$9AdIj3 zcRdQr$raBFc+E6x94BScBBNys)i;%A>e?MI$ZtDD=V%Z}VuD}rVIKwm9ZVr4A z5p%6bzbQ*6L^wT9Fw;_)^H;*?D`$XdCwJJZ9>tJeN;8*ChFJHnQRnc9Z+9Z8rCaF8 z)^t@TKgt~GG?~AH8o*jV=jyyTrkT#>)ZA;o80vk(4GhTNou10d)WJH(;`;8~I$xl+oSFPJ(tn zd`h%VHb+aU+~Dw3QZqcKs!7jQZ2E*LB>rC?+K*p!{kl-G1Ro6580?}BgEnm*D(bL% z%sKv=N;sJCpMAa)PLzt;F+GJ1_?a`SVMsjy2F;)WNRlrX5$SRyUA(F_)snS15i%SH z!PT>zfuy9QYc9vdj;ME?2|nA9`R(9c`HO`r6IkjfSEl58LTDl@V;*=-I5kCCn-ag5 z;ot4>y@)zt7Y{u2GrpVGD3cuEy6f^8g@nKZodX^sq~3g)}kB=@v=>i=s9G2%|#vuR}fGxG3kB8pi)%jlgFc z(%MTQ!zcwEMIl)q`DuZkNFT{W;D*-0E=MdWOl8C4qgr>&n}{DM*MJ>c(D|L(RZyj( z#Q3ud@q*VXf}_&3{n>u-jpgy!F}yYIvQ3Kmp*ZTmi1pHOJm 
z?LSOPqQB2YQRL>)Kbnf``AOr}&c`{ZQVH*%ORB|Dm?sZ-J90J_sS%(qYdFJ`Mmb5q zuuDBN?9px(*3jn<&vd(bA%G#Y%r)BW_=UdXqLSe>^x*D2GE?riC(-af8Q$SYac!jhzA^|;h~ z4b8lFqAoit`4dRTv&!lzO1te9u3y6St$^3o8mL@?Ck;Z7o@Y{Rq-v|k+;>bN3H^FYGVo1?!0ffLB0TD=*71!tRMUpOB3d5eOO>pKkql&L zW!OYy-Eiq56HmrjVjWn?dVO12yNK8_9Lhs)Bn=C=s~jjD6)8$UkUkDS$@2_U&4DYp zl=|(mBE zNrHH^g!}i_7L3mo#ovs(>X$bS+fGA$nmV$q?66=yq(|&IsKB$;V%#tFtVypN_f#f5 z*GlM%p4Vx+C#o@=EtX!-NJn65=L_HoPkz>kmo1izF1iPITPHRw*eZW$r&x)j}huf@%darxb zv010NVpkOQD!EMTyPts>4=g^$peaKBu(E`Jqc8H#cDW>)$SDsiEd=9Am{`r}6&|bR)JrV5 zN$KFvV_2o6tx~%yk=F_$TQuOY4Zrl01=eO;;8Z!>Qg$^GUbxXV1ImMP7X{K*GCh&YU7>P+No>8?!K%4{74y-g6rKHb1m_ui@*1%~ zVy1>A#;nNjI$r1}+nmmaP>T)uq}sO=IAiWR61XaH4!&3vVO0DkTH2L3fdC@#zm{cd zZv(=3xTmV9u}Y=TeX&VZo-Rv1gT|jq>6VFz%nf7fCnk7o>4B7H-%{^s$SI?6sHtrd z99dV;^n)}YF}K;uPA&I6!h@-7qcnAw=IBdD(+KZp)h~a63BiXstv^=brU%QKZ3m0G z5tbPSlxSCsC7z*RUc4;XfXYrX^4bptL+gv*ienZO^FB_?&tH^$wA-V%u_npg)1_CG z`le@MG*4ft2h`NOx|U*^%@j?GVz|1No-n*TVm}zjB-)sWd7Z^MG#mAf!!(W~uqUC{5s)f~ZV z-+d0Oy@IZS4bsTuu|_$SxqTSyoM9*+P2@APWLjaAIMh#`nZpQA=~%SX1C;UW+=<7} z@12Tx-y-cx)AMQtJ{*c4W??#G^^T&dDUEhts&kb>tjuSLM9l6(aLIU|X{qBo6MI)) z2Qk{HS&p`glH4$ej5t^29_AB_Vf5lLTi48JZwN`esX?DO)vq^9e+UW?;y^as#t||t zv!4YUsF*&^Z>9Jo0>wtMl_tOIEJhaWQHtZq6J}Te0XdSQ$)U6 z3c>jG>_={fpSyDtu~KXIoqw3QBz~ z>A^mlIt}!M0VJ+M6WyfEScle~BHEuYXmG4FNBuLY&8u;It{L#%l1B{-sI0bg7K>9I zcX^?y$vHY<8t}DnfLTsg|DYK1l-*NjVn=&NhS*ju`yIUk6HmH!grBjj@XW)R?vbu*Zh83 zWzl85<#GMIR`Sf*xMHhq&|mcir9#1)@(lr5JuY5D(~db1GWq?W;Cb-`4iAfoRsT9H zhw3P(T#&up+iWA?wHGsaPLBD!(Xji}oaHVFEWmgQwj(e4)O4KEQ&p9Gov11gRx}cf z5PoH}_RNm=hXjLuVyV^5Tmgp!I}*!H^1?rR5}Nc2xS^{(4@BFuNeoHqLc_O|HPbvn zFeAANN7bBTwO6gRgr0(V!4yWLiW-(F<2lAif2OXz6*J2QRr!kd`YUp%*mPj9U8Bx- z$P_#&ohh1I5?8EzGWEiGSy>ld$|Ey%R@8=XAs7j@vkt$R%+*Fq@5FcO@}#9>{E)t% zc`Es+VEL<`sc}SX>68 zxL<3;$BMXkhLzb-Z)8)t8;CtmFPwOPQ5`57PXB_aH`zCc+MK5Jv*@pMPC7lPK{F=) zc-YwDjiC=%-|_*F8Y+CPO`pV~7P7~PdSw-8u1%n>`)_2^H$OzEi@e#)8p(U#@cwU; z(k}DqR*TY4MsBLJUpor44je9BUEJU)Yr{VAw7S;6MFEoO(B23eBNCk=y@y7-z5vu6SPD$GBY0I*8tj^m_ZzF^9zHQM=j-LAZ 
zO{CjTTNIYM%3W-=uH}0*CuT5=1~M{Lt+b#EeD1RTYm(uu8c~f>1!5qJbRow@TMUb| zfEK%Kt(tc_4T#)jtxTaOqd0j%)Y$jrssGzIN6#Nuh__$6S8==!0$D$ftR>#})i)dF zGQx3xpynUvI;UL9_{apN`*gca4y$-rN#)VY!;vn0d|FDeC0x^P;r{LO&(ij#qOyCE zXMSQm>urF@Pepzm@R&5pCob&Utp2%jkjFqT1vN zi~#uVUfL429OE_M{r7qG7Opi?b}?Hu2TTgXC$-Ld8i<>+eT5@lfTayNK1ar!AGcsz zcymk3g6p4A$BStkJS+!2NtpU&>NuWtE9<-F=&|?5@D90WbFdSFwxiW4BChTQhCr}x zKU8@l3}`NXFdcHF>D4=S=e4cS3w9OBFn55!)TeuAGzuUl4WL55Q)bWsE>+LAzcqL{4isz1 zFH{TXnNMj0d9fEm0@vRnHs3kNWn6IzV8@sQLpUV9k&^kpyQQy+FXn&lOh?4W$W3NX zd_f(aEjcav{CUuav!&r5QAGHdejU-zEB8K8>%(O(Y64~7FfwC@3$`T;r?_WqFzX`! z7;-6w5fC|38etGZbykL`c5ZyU@^%?q8j1l5-Z{ji9wE*Ow8hznHQ zSk2b7OP>Z=rN!y#bzSx__vQgb6_sOH1!u8RB5IqHncK?sA4Wh)UxuMKH?a8>Q4xM7DciW+&W;_|)kcP9J4~-NQUKbK1 z!Ut?imgpmlaps_k)A4tyVn3huG5(qO_ue49L-Er~iy|^iwE&J1b%r-hqaD=?t|_Z8 zVGB;p4qKT(O8H5eV(uOKUH~U6Rqp3N8oQ{fm`i_K0b5ZvK?K0|~G5R22Ke9r@ZY&9w*Y@;=I_<-Fje z4;R0hHgn$8!4!!se95Iai5Dt;p3DL~l}3@GoWdP$TO(i%IZBXWm<}iCf`Mgt?RQ}Z zk;b{|xK$rL3eSlL`T%$j2)PybyAv8V8H;3baTw&}w1-{om#h8d4r4yj)O&}^830^w1X;d6?4;Ud>Lg&?LIY#vUrjghxk{?^sjJrrw_(UuuH{D-CqFY zuqztUnXjevsH#h4B=y`1hjUwpbVgR{>lcf_hLBz2ns-I7!nU8mjy_7TR z*96^^tEl}`{z;Rw$pe-JVa&GW)Q`2fs>+`XQusocCcQQSjH7Y`7>JYCUw!x5UN_g) zUQMU)i$#%u&PIxvX4)BZYd>L-3FjXJvVyq+3w0IbS)Rj;2 zKxm9p*%A(XOFE61IjyOPcyrq^073__rrt=Iqpdam@Cg9I$^#pvL_%GfqHfSA567cx}B@Bh*=o87iBXp}s)8%gkHp2hz#7lb85c;iD zAtBGOlG4o;uq|z{j39l5#F5InroZ}tWM3yYL4FXF2U?5EPHd0cC{Hx~O@N85_~ zr~wghUdz6X<(2~ppx=-T|6a6F(5DMid%mu0^c_pzWQadR%0LWP?WEj=GysowTUn=a zh$CH0)i~Q2UNSbY#(1|A`!>*9E0jarq+*Lnn>evzY)??)6mVJ5irtl)rt;h!Y0dDn zmBb@;mYWui?WjC}w%;(26d|uG4si7hFd#_g*#dYKz2lJqccf+M zDh-s29y-8bu2jY5Pdmk&isx4J%sy~Q#_cN`jYhJ6jceqC!efOf`$^9apujpY)>G%y zdMvCZRUpBp%*DI@C*y`bWx`Vn%KXJvwj00Plwgz@7sS8XQV#mzYc$L zo?y+E-$$nAH@(kfD#cgMn1&a1tyG^|7M0_55lkVZhMtA9pS-Ov_T8c3MdD2``!1MW z3?Imq?U~7J*>JMucX5}3q1piilpysDvk%5T;;++r)cdRI-YJ(A*;@U%a)+qPGXYQ} zuM}z=TBuD!$sI|I6~1-C@OZaz>vHpCJXpcT5tq0N)m2)9?^#2C9nBWS2k)% z`lGzMi2R*2MS%mBHwj88Cw$8DBKvgkm)HmA55c|9)Nrnf>q?d<>WZSGIhCEnrFN&r 
zU51OT^kI{~_NV4bE82`O%PUmIpHDKpm&ok<<69Yo%}HjPrGP_B&7;<8GCbLYN4;z$ zaVN1*puFw`ZTs0t)T(Ck=C!Y~Zyt8X28QulATFVS>HzU(5k*S#-<7B9xwLUiV!U1` z#|0&r<#VtDAM0-4v}f8bhO+g?u_5+G2sBRe_ttV+`(92*F1~}4S3RLFC!Uc6wGBC? zK)zs$lW)csElyHg9rC8dRb;ZuX?b~Jt$C-eTz}o0>o4O4-=3of`a+|E13JMQHH>MB z8oVCUPjwbx*|2f@bP$JXogZ|p-`T(&bbYzPo(3{!_7&~r_5k}#HoZSpa5J4u9fj(D zXEx!In5gj;IZ@tidH*T(Daf9{bJ-8Rmv)L|r4%ho;{hQZzFr#mN=-C3m%yiu7T?U~ zGAgk>G3Z`^ZQB!QbVR(jr=h`DXJ#-ac2bd7XL0wqU1>_Xp8qb-gS$vFoAO=@6l`h))2f{&cP^3Ai~O5;Qz3l>)0d zyCN3!dEFPY<#l_-IlL3o5P*?YxD-G&V#$emgyyZGeU^)vAh0qjcbK*9cY;5>*y+gj z6~W1%PcyK5cLD72b^A`=hwxdcs3Smg;43cnq1(59Ua`D>J`41F&7zhwnfb{i)#4|*;TGamGGG~YE5h|T5N5;i~+nYmrDw)h zRKL>t(H7LyeCjWm`#%0hs~ttgVareH9y3ko!+MYmKDad~uWjV{T4Kh-v-tgPk6*xD z^`2J{o0fNw^DeRXnnXS@D3e{pbCJk9jrE39sapLu@R;xIw5yPemOQ5xjx+4Vq>d0W zyp=E7C_M6u2N-0{?1SzuB&sFE{6x=+F+TIIh;h?R+4ob$+c%)KVrj#2QcR)77PL4< zfB3lVLC#gdS81m-N>89DnIunbzPOLGdQ(%(Cn|IdVnTemn3EM)&gFebaC2HntCe9b zHaTllltAr2OWpfdj~{!Qw^b38021Hr_k@X&bOiDinqTu4R2TO9C;CU(s6pWyq?sSx ztz5Xhpzon69_-ZPRpt{jcGrI$%o2{&UF35(#Y@+P+VxEnBH-q_>RhEc3S%TVLl_1a zPchY{FSns%_c&U<&qPtd^C|~=CkuG;M-d(=NoMDw1PoQgUT0ZN3s(->p`sx`%oir((5L7)=0=~7CnEw&cDVxx_6ftd)r(DtW{(NljNB{Mwt;PC9I*wVN!#>bSZ=LOcDbq)~p5;fh=i zg}E#4U4U`cxaMgs!F={cAaPJv*@vHXWB0$ zAk}^}3305>VujBM#sVUfn$pqk9;da231q8Ul^nF={lyd_0dvMwso8s5Qk>4T^3s;Q z7hAGaaHFEslf|=wA?gAxnV-!O!9&*9^dcK?HNHa#v<=eJ$-?uuxbz*N2tV4JIlVR_ zwr?#HB50|L8bx{O9(q^8ZyRC;0#%lGLy{?VkE1;tPWC*V%?u%USgI>Pz)>8ShM6bZ zq?|FNVu@j1&`vviKV3&!kbN-_=F3j)J5_wQ99uYZxls$+JNnF0Ri<+JhwN-+`TBh9r)&9a== z^e)@E`hz7Rn2(WynMk+8qM?@oP3@)Fn-sb2l{pFrc65gxhhL(#0suOG9Jyp`;Ci`9 z(yTG?I7sONTuav9Md;W->o^>a!Fh!1zsDl<%Yd&+eA+KNih<# z>yEQ!cQ&~H=-XxdrtXsANtrJ0PNV&mjByP;6y#GO8yaKZ@MBPKzFCye#+b&$qqxNk zMc|A`-BeZI!{42)or98KI#{TQ}b*5d))wVOIzU89;2PL(aZVN>Vc9?=oKKawNX4!H2ub9bm5?u2ylJ zyBB-BZD9GJ-U4oGty`^k?jJusJ*aGgn)dHe zthgvODwXkKsOxfwMGGoI*6L{uS@;N7Hkt5fqg(@dwP z?10t2>7YkXkkyRp_hTeL>k|J+oLeg@aHSujAw9=f4iL#22kxrQD24=S@2osO#u{7pA}!PXm627FTXH@HOh!bM(09rb#iSAustw ziZSUJmB#=Ojeg1Ms9H;3N4f-3uo3g 
zJ=-^rYOycgr{UvdFcIhV#FYjxEiKN>Q{;v_el6 zRx{jf*!F`mI9E&(DTO~I(-lrm+;BpV9I-*f`y*TH>#14B`cIdhit?LY$X4SkF6zQ0 zZ1}iRMzA(WXNa3VEKahkwyU-F#f~Bl{QyqKDE7Q z#ht`2C-1Z`#SKYY&`#VQI7Ez-0dsNVdf%!*_Q6(#egN#MGpzA%h2Ij+jXUW`%A427 zffA9!a}v&)M-g5}wdzZMqy2qlWJPyx^qyF9+lxyuB-3-OyMi)ZvnTJZNTe6MMcMki z%Q;eY{6FU#i`^Is)-kr_AV+t$+5lLXT#Td2xsr;cOYAPoVL%?IwRgH{|Er^{e>(My zF>@~yPX>A9%e)v*9yhn^Tl$KZGid%vUzIS>v3i$POpkh7KeeV%Rde2B7(Qb)o)}To z4O_rX%f~MA2k_FI2U7@%e_M*cjI`BZYIlGPZK$|N?Ad>aqwD?4+uM_w7=UDc+;p!V z9;A4-ohyuP=2Q!5LOuX~){31BW!Qh12wV&JM?Dm%9T4)g4IW69&Azpb=jal^#5=>d zX=&Yus8NkgdG+d*d8tixBw@^Y#NB6?=}<}hjL9s-zG-l}%i-rZO9IkUaTAVhB}UZ| zWf^;hWEUVw z_AI`~L<-F;sujopLW`(CyAGCD5CvNaN7_6xzDy2y98xxKViEG=d0Km?(^Gk>1-?5~ z-w>tva7J|#sP9z$Ch3h_BFyG&$^wMkv>~JK(Rm5;qhJ{?37s4-FLZhl#WFAZMr|8B zw)SzjUHYTHKH$n5nPe6gm~c@Mr=<0L4FC*t)W^2QX52=7Huu_hsSVm4G}c>k<;UbS z(?SxHJhk?*qWXn*R*4&i5;XJ>4s#r=%KhY1##75;MOqGi$)@BSZHex8P2YEb!YHG{ zQilCu!^e1miIPw@75&j2F=kO=y+eBkF#>R9?2?AiMH$Hip;}zQkdaYgvlmxjNYLEd zo<&JjkEkL~%(M?d+pPR+OFV=z3lTKe4k}G$=U|kuZu&Z_E&qsHrcYa=TTB$sHUX*_ zyO@D%7!9*R7 zFnV+adjyg6)8c1Q2Y)r}Jk5PIA-ZFft&}vGRJl-W&(oFe5X0S%fwWBn2C9f7R|9m% ztT`4S8X%Z}v<^DTp7;}z$^X3JyV=I*IyPWKp77;~bSHY~z#YExPfAQS6eau`{3iLp+6t@d{o(d`1TINBNOZhC3Z+vQH09`>v!p6eR0NgI|*j- zzd3a?e>(d8(td`Y1H`8VD!R9Zvcl!Ty{IC_lywu41!yAY#q+y)WN;nL)9z+1uF*Nb z2jWj*hdc?PDf619R-|9RnwPdp8)A_Sd`Xh<3G02My)@PIlo8f2TmcDx!kJ*|KqcSl z#THV&_;#afng@XCTZsQ9JT-?clThPv(3QcN%CMk8?pdWS(7}GpHs>J&9 z487vH(APd1qx^XYHV7-7M;U=*T%(*szCjlxDoEUeYYt~g=PafcpG@h{zUG>zZ;VPg z?Ln1I1Ys)0;9r6_XcrXfr2e(2{#C4MIHJ_Z_!Q5@eM#%w8ip%alv>P#?Epwgyva^< ziMaiga96Gi!^8k3ruhxU6cgJWMoye2wet^gvM&>Kt?o zo`tf+Ayh9E_i~jA^}tptOL!3MO6Uy^FPqX(@L7DP_HEPRW2o)j?a~LVFzv;vneyvI z;Akw}o$4ut=F|1T=zX1PSFM1ig0$ivOVVL3YA%wE2-P7_VnxZlZ--8v&=33ArO}RG ziu{cLuuBn-DIsOlW&)z>+UDXZawE%=P0gPZ-*azmw9>^)MFHT6hvWElOu||gD7SOC zJ23kpElyhYyC1#-x4$fz&DsbnrPS9m6kJCpX`@-PGxrzn@{e0|YP;f>F*y5IUS5z{ zaamz-{I`}^#jNbDK~uzjz;gE>7L;l_(NW^Jm-dNE_l3XU%L@%>e?6ncL`6*>&X%O6 za_E@0QFV;E_@X194r8B&j#69T^wT_?IdoIn7eqliClMznn;J)VTFRD2heyt`R@{g6 
zPVQzkZch!JZga%P?d&AnG;R>JwGv)WqfYzby@+4P6cJljZN_#2m6e^vxjZV@UOAA z*5td%KP6_wc+(8@8c=;MfE5=ZmkUZ7{F;n&x7^_Ao9~{)y3sVcMYg2ehehuV9#3`% ziCRl=aA7v>L5I_x@ctVcqm|)lI_95P6$H$4 z{|li&UcWA1zOueB;n@%mti&a8UOJLS!DGyHHMVgrmSvZ@UD;*t0I@7>syBp*kh_Ow} zisUzx+=M6}vU(BYO#zTkfTA@iiSc_UH%~N>H%aoUDX|`i?`G%)BxNfclssysdY5If z-EQr;t8^zMC9DOMkOq7KJkNj|(Z>qL6vHnksB zf7VN3NID8avVsb}t+SDjWDq&$oAC;ou}}X17+!izbdvh`5ajN{0Cz(C5*-I{WsGv3Zg0FBBf@Oze3N{?ycWj}eaWpbKK zV%cD72~wF^SJop@0Hr`=9D%(m{po?rl<2M_h3&KU7NO{GsE29`*q^Ti8WcNtN zbb00qc&~`~%3IcZeWu$U<8ldd=Wu1U1vrqPb+}0(Db<6eAO#$4(yq{*Nua~15dHo4 z^yRv?fu_Z564kot)*XDrw_EOo`+$BwlF$SrytpoCg&Q^ z)R4+c0qJukFJy%)b*)&~1fX&r;CWY7jBP*|(e)nuq7`AhZsBUoSy6WjR^-&CQ@A+B zD$@PKty;?tTdq-V(4J1DzYwiS9Ki|-N>r1v*pEHMS0LaM&Z?_O(=dGe!aaqC3O@8b z87sZdmPX7*LILnAQP1F-)3zBIZ6^qM@p?hdy$Rt zno9H6YDv}5SA|4%-iVfVALCwUZf29H9=xl};CLKS7o_fX9`jw2IfI;MVO|5Bw%=+= z!Rx&da*pS0?@3osb~VJ4k&V67g>8}2h?Y55SBc~+tCZ}Ouwhe40LgHkT8KZ5Q*j#| ztOBq%Z0F&~{&jaPKV0D)wI9x+ypZE-NdSeX2lB3?a27+94ne`j#0`&uq863fHkT3( z6as+viXL?eZRNH`;YB|hx3jNQQi3-^zXQ0hSBg>P%cqTdfTRPyK(6>2=GB052;PX2 zqIwE#Cns^tQq)ziVa}3HH|i*gJA)Y*Raf1BEa#CGX1tYdHmcjX1)?#NL{w`k++!k} zS1%mXiXY=ls0hc~yc517WpG;!YI0@Vo6ba7%79 z@Fl03IYP0_d&NX(kbEjM>NcZ8m}C@7aG?#U#V92y86@&)d%kWO&YHbBqg@2T8J zaqX4rtskpTv`14^iUTANDrl0QX0?l75!%r0w(dBI5$us5raYSlmFsaqye$E+J2j=n zWkqEDtehRFI>*CEn+2&4ETgynjYZ@(oaBH8vNfx2Ly*S%ilLbqQ06@q&^5H-LHJZE zQsV+tE9pzx3xNRPG@_EV5rmB74tU>{SaAKV%bd`XqTE*SUUUy+ZLsq-7T*!c^Vx{g zh-bD~3Uy<+(sBV>yoDXytOC>=i5<7cWC2r&-42dnc^^8Olz^HJF*$J%}fI7 zOE0OdXdUbm;u4-@@*K08nEj^HHVf}uOrTp)joLW*CuIZzN!cfU>PNfIrQ2_I`FulZ zySC)zUSq~XE!x{_ExDn+8v9(~4P$j>B#e*&=T?uniye|126ecG^)~d&o}wLejEx{< z;W*f17{+|)_=7_j9M?y#eXo95UVJl9_dVjPJgtMcy^Ih0V)nw!y(n4Y*(6)z!a@?t zqa}C*l>&ONKWO=L=eJt!3hb2prt~oM?!=f4K1XJ!aylJ}#&8bA{3+frmwR?n?Kgy% z zr8k%+M0Dh|m`UObbZGZ4QDHX)rE8fLr35Ky1Ozw`le2(iWOL*}=}QlZuUE^9txA^c z8&mEC%3CQaaV`DagT8w0x4UXv++AyK;+v$qb-|m&c1s36!L2}^WcP<%zx&nS5AfU@ z-H|ix*K{IGlqpU&*V-pJP6*jx?nnOsdfTgW9A$?+&zawE~V1VVb;>JcF; 
zjHyuL=mn>wC?G8bM0b>s2rcK@PBGH6d{0|N)qF!dBRy}Qfi zMshmuwmINn$h0Vu zg)S}1MAq~=1va#jatY;+BUnQz>sVb&A88h}<+HwG>lego9Vlq1*^~Xj zfNpji?1gUj*q5Fp&krscTj|5-E`nn$Ytp5sa*X(q?Ix$Ch9EI=yCF}?T)d(}7?e8! zg<)g^f^oUrWE_Fal6EZ2+ge&BC2}LJ3M*S};Oi>XNyh%;ovSXrU#zK^w%24+_IR#5 z+w)NKO}^UkhZWiTP5?Qa4Cx?e6)Gm1>3CHpK#sgUV#<12l&RG;UuNx`8=qb%fNh)d zCb3+gP2p(oGQzIXrLS!Y0VpKpYFXu!nw4?6&1N%!^y{J11EPR)V}_oU2ZGX9`XZ!l|at8j?&7XTN35qwtE&)H&Qm!1T^@ znRVbDfJ6zoMZ74&O|8iCV>}k9?ny~xCr~I(jUCdUf(CE_1Z0}2mj*=M9Jxq{9lHr{ zEwzPZIu+Ul0^unkD?k_+Bpl~%rW4h|vSZGcB|v5Z7|`aBjH%4$NjTJ!2Al#x&M*xN zS?(N2%!`i`aOJqB$suV{8Q&@>X$U%#jXmud&U1n{-mIrw+yQR>{KesJcgyO7Ocs{M zS&PA(5JcK7l6KA=xXy<2k>$loXJ90_w2-F~LD>C}tZWt3eCSt*xOEF|MZ!DDLx=#B zX(v%U)8&spkBxl^=?jM2e3oGk{egN)6L zSXu@SG2%bQr`;_|xIFfT9Wxv6III!?<%996InZZXp6qTBo{r%GT3kU%1NVQmY%7yJqF8YOD`mAP5c1GYbf*{! zNF#jY3LI`6=}^*LWd)_DT2Q5@T97{8o_On=dGf6e)o``mIMsYlZWjgd3gY$-!&76_ z*d?CFwNtva6luu~gyf#?ao(&t$JJ?Xh9~dJ2TA9F7g?)lsn>XW$r1Ow^5!;zvR)dp zPruz$Znq#6Y^PmVIv7V)E91y@{A+pPP9C>#-K;>jrzJ!w_mn&DFU4QDaulu_vB2T$ zQZ4r8m5l3k)$ZfV$F!s4<J(w~h4{{Yu2 zbn4Th@lvetiU+7+mJeL4%NJH*A8&{1gN(A0^Dvx2z1VUIY|YuvZSA3ND3t+9rhz2 z);H-Y3kZ`9xUHf*r`XWsR=tMSag8IODpAKxfg4r4b*eI4XM5c8_nh=rYPDK@Rc<{n zAOI$R+1f06tsBwQUsB+EN;xUW{;;ni;Vb_D{;KUt#>Z;pGKkDI6$1!i328p$fS=u6 zt0R^wr9x05yiwn7Mv@Nr+Po^|k?*1OfPrACiZ~8R*g>`bBlC}&g$tlZ@ zY(1kf8dcgq3t8ev z!H(+;G+R@s3et5HkP5Og0ZH5ryViYB4yIZ}%7floBrIdnWSe?8V%2T62Ne|(7T|H# zi)t#y2-AU%W8azuZ*<^{jBoL;r|({geIMMT#%=p`_99qsOJ1n!z$ZJCr68v#sUs(B z)_HLBd+547^je$Z?kHu?WlD8%)P~Q2N}CQ4F}VW+e*=qjIy!}gtOT2rzrTJ;+H9#) z0_zvF+Zcy)b;XukQI5Y-RE%g3{gABf9t0C(2o(S*3IVC z#C#zswwlWmrx_88C>*jhs1l#+a>n_rmDuwBDrWuj+n(yrlY3&L#gBfIkvYoD_!Vv3 zp*d@shuiuswZ4}lr3GarV=3>B>B;u802$99YA+vK85-+BY-3rp#=rD zT#o+$XOMrRLq`CNo|!q^*3xP?i69TYJihb&^YstcyHM6xQyJLU5d=lZ*vQ-4-nkUKOfdngov+yI?{_w4*v!^z?c44Zo@4Y;&qVC5_RsmA#5KQW5W=KTu0 zTKId391v~x#~t+{3^wZBOMy9UNGb{r1t%fEaB_2ya&c>FeJuc5b9+ZXK_u8%;La2s zsv3H&rgt8jTYH^{LWH=U+YaZ%7YJ>&E-lXA7=*0ju+q{8)r66#0FVGtB$Lcc=Djc!P-H}i&*7Y7Ej>YG{2~!Z6I+gW>kgyg&=bbs_TaO;c 
zeU22rN593iMX^R{Lx-0fVPVYuNNfUfN^_6^>w%2dFIBBNCsU;6&ldRH3k}WYIWDir z)oU9jAa%ceR(o*1H-Emti726JLu$c7cMjdlG==Z#;ErAz$mik9$2T69E>`n`-K zWw^q=jwFQZfdSy8N@KXT7CoHfmNBxr=YKzVcKZ_LYHl3&-D*TgZ8B0NX!@oSoOWYD z8;1MGl2h6pDk`SqTX@B*Di(?5ZMhv_TilR>>+BuMgMQM62FIQ`ZB6hjM54(u^=q^- zeZR3ODQ+9hmqRR+l3oph3XZHNJL5EW#KwW9pc8?Z$>+CI%E;|Ej}128pWAg?#1`kR z*IDecw6yDzl-eCgAZke=O=qHx0)Xn3B=R*sgLu1&eIM~#QermZJ7grygCQzyxk>7y zk+Q(f76(qEv>A4#neZAgqc<`_49A#uS_0#`>nLr`%>;$_N;N5ESpcsDk%|bq8-1eR z5vey7Tw}>v-;FiID@0vwkdR5=3RYA+Qjn}wl>Rn#6oBylmkHl_HrPSmagMv$r;2_V zbQ}r&{{TMg=?_PqlC7LuAGGXK6IxcZg6_NoV_H@S3POM&;EWJK1d0xyz^k}SnQDyX z!Xyi{rN~fLgs|>Tty`qHgMqqIMxpm|ss!PF9^=@J$G+U%V>ubzpNTy&MvS(R38$}O3epOrC^U@VhdE6psFC){aVz2q9g?bH4t;;4s#dMjf7rocD^i*+$hdUd(~zkPH7LMl>nq?p6SRO`^VlB z#ka|~c`UznSJWkv8gVgFlBS6Stqto@%7EsgoM7$Nk#oLZ?wkOtZIw^Obp=tF?Lo|l zxc-SMJ7$Z-VgZTSp-?0hh=@) z@|3xZ#(YLmWM{HclA*0c9IYu(*o<@7>;*{T`&f}-ZU@p$PMamf%Zk(T3Y|g-dJ(IZ-0Gb*AnK*n4ZHkz*Oc?Br1x-{X`Xw5Xqd>(Z|a+|vb(MH2&}v(Hk76H zs~v+NCrDb4KXL}-^#eH6cx#8_+3hKXZDGRFcXh=at=08Xk5lE--9SQ54zoNpCN#J0 zs%WT%v{Y3fsFf{YImsgc9z$?3Mw}&iw=TyCxwjVMyunt6+;|oCddWZ}pJ)sN&zCye zN`*6bI^Em6E`-Q*q4!qvVJb@0Rozxck1tk(_lL5Z}0DLGE5`*m| zV`09YU=DRlv?o)k6zSA~-Zm#-3<9gxy_$X}G{_zKdgFEF;!uZYnKM7pM{!{Jgq(gj zubmgQ$;}EmY^4D}{m?6B;x?YQdOVlwEh;dTAf<@PR(mE?phA4G0T|>tbgWh;ryWxH zm8h#eIV9BeG=&x|<*bpHSb zu)BvDTTuFz;*}*XElN2gC%?XbvsA!j0fDs{dL@h(#LJMOpI3~t?kXur{55_xM5`Lc zutP)ztgS$(t5)Rr(h0!lpM4~i1Im%9T>1N`q?{yUZMLrzdSFrtiB@SUI5@_{(GrQr zE_9vLIes}_(Bw4aV5hoi_2V1z%~si#HMrhJ z3POPMY+gGaT%8-+gbDYr>S*J{OWSxe2)dPerWbNfw z7j@yiIpo$t9Kw$xbB=uK_V;yxW`reAc%cPB4WTJ3amAiVLPqL7L%^Ow zwdghVbm76HG0!6r)O!_1i&I<@=e9jQ*UWR@Wt_v|=`LLD#?c<*YPz<7-QGlob+X_9 z>Q<#>IHIk_GC32BRp%4%v?z8M2)EnP`*0o(y51A5@r7imL}NKQ-(Yv!6?bdmD0gQ2 zTVj^5g_Jtmh$wX{9MDj#6p_r6whc?ST~j7YkmSd5B&NV6wU9#6c_|7TDJRG92Ox7< zrWC;d-1q+1SK=QKt*xiP4-LI{-p0~Ey@m|k3vg|`$hcf)wHi&nJK;JInPpw!wpFC* zDkDBP8ghD7)!8x_wM&s_Zc-*P!q$}O)!Hl!gy82<&xh};@Q{&&rvt!L2VPi1k9NT- zLdXqZZ}01qUN=Eftwxr+5zn69c)k}Ux`U%+KKU>#02^GJ0bnGaNJ7UxNW0rpYT|dU 
z>zb7+b5DTcSOGcLgz4=2*iN0f^2n>%QRJ1R}MKA+i-F}oy09a6fUG%^4LCqusty|0?4ZNfga)5aT3zRa@MwAVZzc>zN1hB zyZ{FqW~<|H4+`I{@vdA!!3mLbK`TVG{X?q(M@d&^;!fik1a&)BX(IE%w%DDVYhC`> z{sxy=l|DO4KX`;KDI3-7dAC)!84!4riQyBCZ#g+H9XR{0rCoZQ=f1vO3khG~Sw+bSeWrl)^dq)bK$yRy=d%4#-x5aCR{{ZD#x4@Hm;tae18ZE_m z1Pj8!Q{gPC;Ns{v#m`9INSPu8{p+rtgc0j&oX-=+RCds(ONxv^P65EcQOL(!{3@15 zqC1<7T4l9zb%!2U^%*X{aMQ}~A$kBhbAhlVc;by*^oHH4-!j{9x5c~NAh=_#j@cp8 z-_wO^1wdfzNZ%XORB=_Z#~n8-*9q|oAhF+NEKi2>EFM`(kmy>7M<7-G<*J3RzX<5k&8p=-NeIR)G z3KI0a!~9u!wp(r%yB*r)Z&PJXGX^Peg)5S~xhuxw7#PXtor*4Aki8jQE-9JxrQX!z zfg#Z<+~eg~9Kr8lSOa~=!)>Ux+vx{@QE2rWKORSpP}YVTmf{&8^!Ct`pN&}Bl^EjU zG3EC2-gm;)UG!Z^p@RPa)#shp&sR%~i&B`qTzE%`<6a>)8$zVPb*RcIKtfzlDAX_j z%0U@B?aH&uMAIHpWHmW)q!M(kw@MZ>;C4IL+YH^kS@`lS$^lwPcPJ?ic_~_mUi;(^ zI`ZP|%cFsJB{o}1feaII!Mz0(U=C3_1T3MH2MdlPhU$&n5iBUQnAtc~pu_9wA|!M zN+F3Z!!AV&amMu&onW{W*4j{estWIBiFl*Y8o0jS3Ovh%+XPl$))LD5RJe@fj>wRm zUv)%bIXDW=YTujF==d7ulJWo~L<_+38(1D^E85i?#ttkeY;OQS7#k0CCFs%j<8bWG z6>zerZP7lMwU1Z}*9l|m5C|vPbG^SsfzprcHv4s=^KWwJA|2%tlez4QTa2)fuI}V4 z0nZ>~W93meXM^~AgRYkdJT2LNZQ=(Jd7*o)u#on9sa*+*V3)F zMd~f419v9iGwUJ1oGm@7z&)BfMC{O3NY9QjzQjL*?d!d>ejGrjrrV!u=++eTg~H=I z>-x+juL@B}RyrNdGg>n4No}-_#Vc{NfQ6ID4p|*>Li|Z}aufilthV&078#Dx-VG&7 zS0G8~Hzxp`4S_pX$>`|nYE*Hq*@%p0O~h@8;=IpA>$pgOvF;$0<*E6%Nmkh_ZJCOS z=WNhaX5JY_r2VZ57&rqQx5(VrgW3~#b~~*iEY@|UOd}e`fu8oq2M4Y`0G)<7n9aHI z5@B&)X17?197Jd?C@|UmW(9<_gI9oC2u*b5C{!q|3Vl-Cn3sxXfdo~$csv~5fB}dO{_X^@bxk(cr zPbC_8tE=ce?4KOea)LRH`qBz}+ffw6T(m2+XciWJ(@H2mF;f`g>-;<^i?q@bMm4B8 zkNAPALXnUuQASX?ltf%60Q5!- zWrF6%N>t`_1rTxuJTgT9LqTlmC?Kb8=*DLg2+)-0Z4y+%aS7C+THD;=OG)kpBpd^b z`^5?{6m=+)N!*jpuhh|PCjiqv->LIn8U~5s2Huv>EwLM?lAPrDZCX#FIQn2umQn(i z@>&NR!azTot2IX>@2!8)+di+s&`AI-;HQuGlt}zBSgGS7Z~G(iuC7zER>6guT{xD^ zo7~nMoXcx!OO32JN^nAmJCTZ$3g-poq1bbyEodQ0(&|;e&axge8RJXg&;J0#WMlS< zs`R!98o=MgX1ZVk*efV;YosmYxZ0Y-O*ogF7+k#N?&^&GfYTVW`Yk&BB!~%t>O-L zs@0Q@MxT>~x}^YWJD?04bD}R6yr*G`^sxA2)(b8MiNJG7(oepUP6+Pp@1y8WLUPjW z&6v;J()zt$)WBIwBS_MX3D^>HbsmFZnv34n2|fF$J>h~+e(OKmdgoWz##^dR$Vpe> 
zL_BZ9(l~zXTU0qr#drmrR<-{C z?MkW?YcjPlQHDWERU1`-?ZIDXdK%&N@W-M4AGbV!ht(U_9`jPkQ77k*g-`lbq)Q4h zVec&g3YN3z1V zUfv1y&sUVO8w`Vo0#mv|lrfBvwt42XeA}B}!{N6-Kh)d$T4)cLXAtH`JY2!@Q7omhrV>mdxRcayDcsW~{@>gm%I=M_60o3vk`okiIa;339t zZYVrT7nJt`QW8l6I)(-b+dI~6PN!8=HPrEbT_Sdaeki^nPvVb#M~1g=K|f>Xx)PT; zrmc>7}R&%c~Rw- z({E)xoRC>mbcWk$ZIqRC02AOkQ7@=)>^KX#UE@SsQI)zMX)Dl-VM_8l?Tz=xTGDk0d(n$x}?x-IT(y+Lu<2px#?3Cm|{x z80Yxri|w3kX1&^9wZXTs3PXqmP5__|4(V{6lAH`6oM0_K(P#6bn^9Lscdp>;Yucpf)U9suxDhieLJ(}JHZ?B#}M!^UGoRim(>kjK_ zmi2|kCphyjK0>0V+6oiDbtqbi)ZjeIw!j?gHBSzm2C~Nj8tho{6K{-Nt*F@29rXkZE%G2?kJK0=Kq;;P?r>8KFm~br#S~0JE`h(Ycb#CX| zC*MxkEc5P5C~vWd(8i};PzQckPd)l)olsi1isNrf+*~YCmmG8Nu&K$2Wm2^r_V?!|1^d_POT z*EQ07{IdrKV0mAr7Bp&I4`fG}^9N&r(Ah$@aMuHRE440Vs@4@XY3%)OK@Q81uMKC} z9Y{_RH#n~x{{TGhkfSpF+SSAENl;rOU)O0TVWDVPBXBZsPl9<>p9tWL9EBn5mm3#WMXSJLoevm57Y|09PdW6N9Bmfl{BjhQ--^$7K7Epi2Lq550_^7zSzFUGG)tsd zYf7F~xxlrsk+II;kPlLnfxh)`aYC9F>s%`osC-hl&#cRBDMDWuJ*f^Achaon0(RSx z09B-W>jG35&RHeGdeT6gS!=W{jU*6s{ga%MPUFU}p9h>nvgit)qH<97^U-6Zg=yzKH1wGmB$uJjW+JLqLAhrLS1+#NPWNnTbTZ_ z$=`F#)jGYL8dd>0+589o*-(Xr9bqzZ< zLKs5V?4+qcWa6QzJo}=Nt!*hQZAwbgLXxDM5JymaYtu1hVlTV!=L^n(dH(=6mv(Ar z52P+ir)Vw8M`#-*As`l&sDctovO45$B+keu6>E9pP9(oaFfLQWk@W?*8)Xft^?`t= zRFDgb9x5ltnKe{4Z0*y=p-rv>>Rt)mC3)gayK;QO9u2ZsB!G-fT z7$BZ9y^sh00GPk=`o&dwPEH2r6>r8s{{Wr$j12z(+7JH#^$k{X7zg`AS5deNA{bjbqT7^&IFj1hLNZI57Ii5{IPUF?oSL2fOBTl_ z`eBW6YAa8!+Z9GJaN#MC>C%FugMpzAowqv=FgrTw2HSaJ<#mratHd^v+fAiFM|( z&AJXyEID<$=8A%tJVrYGmdPqy$FEl=XQ8#tN|S`JR2xz-c>;DPN%@Lzhd^4TT-MvI zDoT=~UukV9U~Vw52ms^*fsh8>NCmDs8kF0%sI#Mn*(T!ai(3U`q>L5kDp64wDJPL5 zofPp}?y$WYNp9HVuiajaqB^xH)fZg*y0W8;oM$It&ZqGD&Z|d>+|OC%ef-w?)SFHV zPI<)Gk+CB2BKHIo(-WgjZd9okC??2|vb|E7J045D{-{AfP@t#AZ!vb$PPfC51wzs_E?NI+;~a8$F`-vSY?~QZHWNt zP}HQ5p6DS&41Ll?5X6|xx*A?ob;+2t27_T`Ah@*zBMTd#VOc60ft*%dSpuV36Cu9j zULas^djb_zs_LrO4kp$SU^>_U44d>savF#@koK-A#9Nx}^|;$=QWLo;NdS?86R`;= zC%izw#5kLaE|(4^mkwNsqU~>*w8emob}Cwyptg=Mz0eYSK&`Qi0ljFJejvD8?h_Hj 
z(gNE|b~fS0oRgE14%7{0ZY+K25YL-+G0Z{;e9UfjlFj}gVCOWYGtUAkik0E3W!4bg)1b3<~%nexL(AE--{V8 zTBjBnQw>XGHR_o{NI*h=qa`X(@rufp;c)r1jZ-F4Ua*quq$JtQUtwiY>{j3C% zPC4h9sJ~4~<+NDgc+JK0Jrs0b6sb|}`Icj(SYkn#k!;F#edXBq7Oc+|Oqd(h+dKgY zB`Q~DqC43r10;ESs*3R<{dt6!E~Yf7=_%|Z#*TemC?g<%eY56ycnTnaY__nsIBwB+ zy7spARqNAbvX>i2F6mLikfW2QB#Z;G*o9uNTr+922)6V(M06)hs|Q1&IKc#wo`-IJ zV!9fwJq>gNxh0+Dx6f^Odhvs;J5U0VGB5NEbPc?e4M2r0ix@z^Y0x1r5wn~ z+ymi6{))q}xY3K1!UITH)YB_cvReztB|s9MVG18Du~xhvYp`2jGa={^+PA*6rXmms zDp5EEO`$j$&r+^?(NKvhLS)8Wz7z<~B}h+XL0KaO3)lcKMsR9En4(J$mrpN3cEam3 zTq-rjpps{nNP=-Y10#~F;%hkn06AhWhYf?g z5w&@F;)_ftyB97EsbKfg*efUlnO2Z=f#Pe%R`6OH41ZnSV$)No!c8gkO>rL?Mzv1* zE*-Mrbdbwo$Jv0;U#qr#lB|$2aBz{5amuUj_t^8eZHe}$8kF6^NNtrAC53t~MKF6~R^B5=FI<>nd^d4Fr2jSAo=b zJ|?S67VCqlyFs|3TMlY*x0rb=Y=NBe-y_O^xaQ4zdA=Bv{%RYAn#!=bj7HGnW&!{U zN=JD>;GXd7zf(m%h^#U#u3s*WTUTkf-5yJjtxphE!U9MtP~?)Vj_*JKJgT-3<{7c? zZ{9YJO0N{_7B>E~>urypMSXX)v34vw9Rf6%%gYj0*l+@Ws(I)00Q40Vz8kz-Iux-MBZ>h@8=l$R1uK!t5)uYTQ9SD|k4;vMBg8CEC>~a~a;bX_ zR>9e5_r5-`@O`y5OFpa2*oi1oQ1C-6C=;I9?Z7$PNy3LGR?d(5-Q`a;~^_`Zz)SUkO*%`B;XQM4*f?VwNi0pb=>W7rN@0y zW!t2pYYq}YNkQ+V<9}yR$5I6%;+gBX+#}9Rg+CSY-)Q4%LY9mIh2ZX-q>Zw0asfEa zI~qY!Hvo2kl?-473nc>{Oz>z**PzV?Yu)d$KSKU zl=|{k>W~t$2URQ(N5s`V)6uPsx3m#(fT9AE*vah>N0zXlN4U|AhFo^bh?0+~eIYAQ z87Uy-9ChENQeJM-oQm_t<^rKUw%LlLr0Y(8*IcV?VB?p^H5OGhcu!7wC(GYpzJ8ru zO$|?m@hv`mZZ;bNHqV%ZzecVFW)=8g@=S=>{{Y;Te(I{3z`RQw6*Sx#nEwECCj|T! 
z6ZzJpgOq|sHqJA+uTI7Q(KLKDbCyT6T4g8Ql^^+&M+#Q^CWyIP9f%d- zBdHbUK|AfXX)D0M#Stj0WBJk)tdWpwz<1nZ#*>jmP67t!YDr(0T1eZUg?OTK%7~TE z-(F`P`bt!BxUWt}A}EW+4e1I`Fh=yH9%BO@RN9oFc>pMi@;CakU)Ud&b#!0#X1}mM zD(dAsC2Sa9J#l1Rd{uW)Jm){3+A4=+618@0j_BrShmIl6i^UgLpN#uWIW^@WYixw6 zC1(U4N1ai$qk-K3bFQOs7DJTFQ9`fJw(bo77w94%uaJ>~1Nw#+7G^xoV2!IWKnFl#~Spw&oL%c@Rd#bKe2R zq%xKfAiCH<+^aih-(FLwDhWa%5wRL)s)-IUgy{+?DjJfL$dvN~&YpnEB)Yw8A-3Bq zSRBY52hNGOKY)0vir#whY_rOMB%r6axn-E+P{NjxyNejy0FXbe*0in?^ebSnmgL{B z6EBkxGubjsXyE&jb*tj69}3&4Wfl?PneNAI^4UVWNns2(KpuxUS_S8dew;`?r*2A| zQXU|sE{tg;@*rnEL{aY=xZKHc=d+1uXcW4g034rXSlAx8{3z-bvds?At7JQC5g959 z2~J9K4n}-N+^gA7YYxk}#$rB(i*2R1LxoxAlYz0o+sddSnPHsykH-CR0{2tV7twDM zjGOy;*yDAYSr&w}<8s;!p-yXT1A^E)V{Tc&I2CVjLrIR)=EOJzkZxbnL~7N9VNceVuNZSSBdO(N|sByp@|VJZO!APvvpKkF1jb5-D4 zN}7=f+b*&kKCY0JsgmIUzd%60Ay_yBs13RND_spgiVWdY-8-u^#7ABFE4#>Sc2L=A z;+Yas6r{7L6ps}W2QlJLl|$nhtUB~-JIhpcEyAQWsK{Dh0Cz$=MCnlkf>Ma3 zyHhtt$E&T0(c$P!y(u|PHyeY?Imq)H@~7g*xi;gmTDcJV_m3}}H0$=8HR9&d-(e0V@cefkWrr4|VQL}WA;f|9io$hn zKqppb#jLe(;;GZww4p`!iCUEE5~jd7$=kf8ZgMx~cN9^@{3kLc;!G&B7-G|Nie;iQ z-yNbbkXDd^%e=4wBLgFzF;2p=T0LGPkge0=Fp{?Cx}>Bw!#YoWeWt+sCn+OsKyS-V zoz(QYhPs(I1RLUSIL(hSwX(M9DO6+pSH}&#x({hKBMPk-)|t1#fc%Lw7-ht!V?|>r zX+FXmCkxnTI3QyKG*`p2V|^QWEIN~6QBH@S0BOL~w1U3&H%>A~Ty>z{E8+Rd;#EPL zbB|?dO?ZSCk~ zm~OnxcT`p0Q%J!8Cj*!}kPS)VYl`8$GE=eHwA^N>6tJfYic4)_8r>~>JL(z9_nwum z@k6dWB-+zyxd<#N*Ve3un{#27wQr=Ii5ccIUR7;PGfty`_+Vz{CMW6wBgpl^wq{oG zYw*2-1`W&{a)G=-pS%g)V#l{wE;hebmGq_dW*$~onhWrT%0FprG%VXB8xOU?0$A}{;`Ew7g1eRMvpXlr)f(k->cgC(~ zenN?_O@%{5VQqx$R|`Sop*nQxR;-`0l&5ffc@tS2_lE%f{{WtAomKSy8>RxYW&7>4 z`CQG9Fa%tzm3N02irjXx*;+>60z3hzk>Xq?$wKETZnPXoE$azM-RtQ8-p)BLpKzj2A>WysUersoYn z1Q!W|*SqyvrGeW+wkYzP1JyIzp@pj`j_AlzQU?1gJ8z6_R?keCDV#%sq_(ZxeM-<1 zu;dQsVdaVd^k(_DPLpMNUu@Ke6#R7zD5ZaGJpMleMUSPlI0TS|44o>H_h_(r0vbTZ zH{@$E@tsh;&hN|b*LAqlXNFtP``@@zZBtPmYs+Qs!7sL0f3qh_2^a^*BV+TaGa+1< zy9u`VaasyW5WQ{ykW!MIq^EL%4oDz!`9pE1CqR)6#4QhJQ!lvKK-LaZfDXeTbLI^_ z!}pJ*EUl?vp~Pjk6V)UfVEc|o!l`zU)Npk|edq2(=Kla~mW>r^nkq(^0v*$xt>~x0 
zNVeKj5tXV!m~R6oNm1vwhEBq~;Vv1xak>P#Gunl2O2Au99QUx5X~uG*GB;L`HtwD3 zzGUP_Wo|t38$y23I0Ke9_u`^(-xBc!j!f6A(a@MOlbuoIw)Rw^>D&|K%pjN4C zMa%_5-+m_*wk7*N1#s&TWlviX7|I)5mwQJXJn)qBQRjptY%l=8tlx~iCeM(g63&&| z8YE%WB`cQb$U7wWSxFmY>QM^VB#h8#k#oP?l_}Y;DS1P0&y?y*h)T#vI2$23Bf5Y9 zCnpr`x$7OJhTfFQZV$ZFi1K5tSXgmbExd&hfSn|QcR3>)gH9{L)mKRRRK-GhdPtGd zkOPVcec}2q=MLNnCl1WJmN`csHa-K)sM!KaML7-0Fnb;lr zWUF+VhcT|EH~#=!#OHNe!-_LF>U?|hr>k_KIzS~U$`Wv#jX;Bg9`(eQOOqE)CH3XU z;v3C|b%oe1?)%S`w3a1+HwaT`(zTKXP?Ug3tDXpe&9M>elO9rm2M zN=ip~1rw(wAeAT$M4GovtU7WkN20G!r>aSN1c7h$zp{q`m)4te(;uA57>XHkGr)wr z07_DMcd!zn?jURcs%g+$X|Gz0r4%f7p|YO#Qco`glk(HM5Qt73k*0Gjaf+tIUmBY>2-A~ReKyFJC3~9wR{@} z89x58wd?`8iB0TA-0RHja6cO4lY@cEkgj;%wyKku4rG9kfA<-LTaOi_g}K?Oc+OOINGBVoBI#ic1kwo7`M1`s*A=S1 zoz!gO*jRMB#_DH;WAJ;%R-x$Wp2vlnb4dmFR_DVwAA)FrV;wV4tWo_=#{wz#nvQ^f zwT=G(Gg8z>PfDR|UWI%GdC#HGI^sroZ%HQ^qAwIrA}g`*HOKR$sYf`$qAnzYPALl~ zIT#tQN%H>y8cLL)H|66*Ug{_1*1gnl4k zj+8}z6a7`6>?iWBu8IDr&-N4fS63<7D`3K-mg2;xlb-2XvNPvZGq`TSaLT1y7!E7# zwmUU^(~rkBHbd!D`3Opqq8?Bn9=Wd8m3u=29c!rE1(4+lPxNtab1H*!eS&!+LPF9% zyasALi-WHg9wC8tN>?R_vJmRm4Ov!@pmx%7Hu1$9Aw%q_kH(gjsVWK^D1D?H5%I5L za*cNh1SpT&ID?Gm&a~fnn%ylj8|`7Kcmfk>T6I063GDl-)2CvkCq&SH)5{D414sDNR0G#7bcxT!*N5Cn$yK)x>QmIOB>RUq6l2wtZSR{;* zfN)Pr(|CE~1y@I@70&(n!aAF&z$kuf#v24FX%snknC!OI*rCG9e%gX^2fxCtIKDG* znMgxq6(4IARK&)##B~<|9jNIYynXWN$s~XV6{u~$$7;>1uKFznjh}zi=L(m&v{?_j zuDLxop^1{?3%v1oyUn&OmXfPcWyK&VSOk%$+g5Tkob=mzAe8c)ko$~;G}=^1K^-~b zn^KabrCScW^s6aXZ9`%2?ayQ6v_6SXRIZeNZ&4LQazh1MgChQqH%NO_~2DGSsvQbyq6DjwoTB5Qp;Pln>>0z_bMYkB_E zDZJAJ0|4bwaK!hVmvxUf)zffo=0wSHRvc|@sV53az{W6ALC%sn?@+JYTf=sU3bH_E zP=y)gB+87k!zgo^NO83#M+EPuBP0TMtG*kH7Cq6SXl6@M`!xw&u*ReIM*PS*-l%w| zfF(F*D%){sFdqvpMR~o_7O=IYE8htu6U!qfepsrNMpv!GTf*Lx1jfXR0(x8Sp{LVQ zsi-w7I?;|^#EaPe`9q&>P0xrfSElY!V_N1y3w89e8gMP35D>Ria@vMUfFSO3wlZUJ zm#kjx(OIA0vq zrr7M(5WU87bm3IKJ;9Q9&*x#99Vf;b*PY)6&~V7bfkbuB#pqwfK8HQd-bwB_?Ec^g6eIt zC88?g1*Ki)+-*b-=qg%u4!Ay4eyeTr-*oy)-|BOpksT4=M2A$3ItkLSpp%i_++cFZ 
z@T_doid7t1WwT++slCiVCfLH{Q}}HGs4sov7lCXZQ+&e8xW>;G>BVl`pPH80dJ?2Z zRE6QLlW>CQ^E z9q=mu0Eu`e_TTxrKF@WtddzgX!sAMr>c#bX!j@8vj#5b}BRy(^kL(<4dcLxgzzMe9 zT8vcVk*;c-96&iqDJLiy)q&S1o-40MU#J8$L63266VmOq!L}iE8&vqf5iAFsTGApn z$ulx=5d@1fkJ_i(WW|hZ(~yRe;C!Xv(oh$zTV$0wM#O*ve26%qE2WVDtTg(NTS;xs z)i^qc1Pt%7!Rzy?=XjzbxTeWNEki0&b|dY%7rrohp19@4 z;Q^4`XXELXf_1vmf{OB>Ml|p2?eWIKh&TcWym1S!y#%t-z)KB2vUVVxl0Dpqhdt`D zCkDu2D?-r7FS)5NsaP$b9OoF{;CY%mg%(6)Nm~svrs2m^#~IY_5=jHYmU1^A3ea6m z&TL=4?6=p{In^3#vcqBho8M{aWgm*6z+jgq#c6Fe*m1UyoS>3G$j)^6jz+3g#jAA; zIGc;H!yzGYn{QBe9JH$#-;pPmOw@bh(q_a!esWQ*WGF8QLGI)oN0{mF#{C6BjTxK5 zV>+?fp_I6_sCNc-=jDU)txt0_*ttG&V-vJPfwl^uYX^v2!SvhQXUb2qj}Fk&Z~Y@K zGQuRjRO8vyqS?t&8QmZqv*AS6Y&k#Z*s@xY6H-!5R?IrZAAA*H_83thd7GkHs0WeCD_^=A>C~PuF!5UgSb%6-1gq8q8nO~ z4)^uXr=AuXf#7(6NKGdD4XfL!w#Lw7K3quy01`>u_39055v2OfwmsS_zf$X2QW{zs z=zXP;osQd90+@?ZS#}v)ZJ_4~2L~Me=Clt**GHAZa=mp4Ec(-?Fj8_rMl~eoZe$;w zd#KT}n^;)sC%a!8UN82ZrRyKO0{}EAraC<%uV8N!w1W%YEsbM#vG-k z2eVR|Dp<&CUI23heU$|K38?CLxTgJjQ>c{~kmgcJQ3sekGx=hovoX7ssV*oLCFY78 z2cF{wy+e;a$E;}%N(<~{6=3B_1H1>Cbkx33YKUyaasZr041;31+Li}2O{2@pa6)lV zR{1k!$ct!)HQvdmrKNvdph*dAB_&GS;E{p8GEUTv54zf;KXy&(CAxGo5!jL$ergzP zSd`ivTW+sT;UR7$>_|w)4=Pa*F~q?ztw^5agX;*5*{#ORgd}Rp5=KG7NdSz1t@)Z8 z?Na*hF;i+x^+|1*#cil)itv3dsVP#jauR@(gZ8jZSyN6l@+LsrRh=+UI=x^{4r zzHS;i~y+jk4kj=9ZTt_owZ7*>+ooufrT?2H^^JyH~R zAB#HW#jzW>sx&68mg}X;L~EOGxgB9r6dob4Qk66c!B|$UKr92)mMpw7&DKO)bI}B= zi8w(+sz@Z`bGY092BW?Jtx$SDe$&G!gAg+Ohcu{``>6{{n$n*30Z9QV&poq)THUDA zKS~(J{X?fBWX#E%S|CNb<`<&u9kfp2_pP=xzW1oeX=s$`Dc2c& z(tF1`lC+g=l5(HDv8}U(V*M~7KI-C9Mp zh>VuOZP2p3sHh#h2>=7I8Q*+WoX#L@PH(Z39!&Qg5#y|~q%aDPd1U>vFa`ng^Q@Gp zzOXRaj+j=BU0Mu=mBv#Wg(Mz*dGhH< znG)3N3Y~F;%;z=h3`=ub3X;C*2|H{?b5M0q9pRq)l|=`5)dLzXZTeXJC)!j-dnuwn zVoID(-WliTQ#e-pW1Re}9pXC)f**2NnH`jR&VrGKQsEukxCtu$xB!Yappq*(6<+#|<)BcGOZFtHzPo$%yZ5?fPM4vYTxpU|$3?31R%)}}^$=NX3 zR$GSSp-4dr)qqamY5c+5vla_rMV9hPj84$3Cwycmfq~|B=e=ha*)kzIgj~@-hJ$ie zwY@c&5d@G3(6*dPaOM&S)6`=IwQIZbnd0XghU`g&$77_uDZ{U{X*(pSB%oxBl#HH( 
znZ%2Q?t2kg;dgHCK(||CB`I2Rk6Cj7;bag(a7YOUq1@uPx8RS~Q)3=WOmSf27&1d(jV4eI4kVb1i`w>!0Bw1ki!Yh4_rInt5}bzng5IV5OC*32k$^j?*mBM~RaLe> ztli>A17~CpVC$|>nH#zL>P2Ru?bGhwByI#1_$D=!mF!E)Lr1X&keIUvZ{+%)(#O1R9{dBD)Y}3;uHT+B zL|$#xBE1U0%10_nhEFQs2HSh+i_^bKZA9ecZA(cMg^kI^Xo!jz`kO!bY5c3Jp?|5d z{{WVs%DTBu$y){xHS30BKXR2K9Oq)bQG9YWm+-j6TW{6>Nf#o zIZnpnH_D0}M0Zp0wCn`V3mnx9WxL+eYwOZ9F3@1~k zMxxmFidBNk3TG1AQk1;mb@U^%N|dCm4g(&2CgN#v|F<>IvVm8tPBoL&5oRiN!C*Msn(ji4ttHCHv*c%T3DJ{Jz zi)(5^#-ep7X$Lvpkt@mBSKBPnAx#hBTggU4NOMxs*dy$MHt?cE%1V$2WcI`E8i5k% zT-o4|ug<2kC8Zy;UNwQD0`<;wjJV2)e4Pp%wb@q$6vjaB1tV6Y&;;`&@&ImYH=P+5 zDbA+bYM%x4AUdcr7$|fg>>NsW)ST_HI6E9|M!aKe`b93G<1b2(gAv&23oJU7YTYMd z2?IIiGBPR!k|e0NmhRW9fPQ09y;X*rK}uUeAxg<4l_-!m&HxzSnbi7CZ7}KY`rTVw zPr*&EYo=rGGrZdd*(*7?7TE1p^9xFv1eB>bP{&LjwOFg%SE0VP6tLr9jOf;ssB|h$ zQV93g&~qVLFR=?32&_zOf$fg)_Yt`GRzWxXnz(2D>W6r=UZup2)U-%`BkhNafGjAX zE@di6IL7+9)>g5tB!QJxHwHX9CJ(=wDX=!xwYV_Y^|zWWA#y_9a|l^VyV_4B6%1ej zkA)Z7J7qApWE{9vr01R!x96Hni?zK|br7J9T2mvlJ(`pbFcsIsJiO^MW<;}a`}aAr zP$ETTPd@5FR>N{Y>T}bX>o_qa5o`95RNnzf-_OpgIMPMYjv1QuoeEUcplHyIY6Bxt zN>4Qe^kVI8H?1iZ#u=l8J z0U0~^R?2i7=6IaHOLZzvBg7Ytxx&h)F5cG^JM^sbCF)kf1S<%${Pit_Qp=z6Dw!Aq}c~kIX?w*h{VzB{;yq4U~^8c~(vr z2l02F?6bE~S?)d~I7qeg>tT-hpljy{T%~aoNpM`Yvn5WU##nXqq4&^Lg)J@fP{CFP z#E)be)Hq_^nB}LgK`Cj5*3@@g8EL0fp6i-Ub_yeM4iCDA+_uIyi(Ss^3DbOx8g4Su z_ta`f^lMngNlperNCzD$o*=(nxOL`PdC@(RlC>?+5JJHll#Pyl6=S%Z@I~|f{{Sj2 zH4Q*T)|)=SU5g!;rL%B~(zLYV98YS7vB8K<>!i{ym8(6*iOcVrkIZSB%0%`^ZBDWflw^`L z5l>}<=?;e|D%N3<@Qlzhkc#`lh61ajj{QyOQ zp<3bUa49alm290D(vqBlPQ(L2cCHe=DjOFG&=$*2fRxBvDoAu61s#vPQR1~naa5V> z{jO~{vZ*c(n724m6~u~q=Po0**V37>lO5DCjJAMD3(f#1D&_wGLhpz8Ue`VQO}2B+ z$F@gtL$`UrmkC+Qjz@Johr70<=&hSRCgb=OysPgd~ysFJtCj)(xPR2n&N*tJY0L@j{1a{t%%d8>t(DjBJ?823Td*BZvzTbFN+GeoJUb3fNXHe1^Q2_vCfS?B~ z<24apoAqLs47iw{u|9ab#7^?0&0R-0X;@Dv9$dVm>=2F3rP{cW_@N|5xF^<_$i@|( z`f=n)Ki(Btf1bN!7vv=gSa~UI>N-+|>h3K^!6!NK7#mS99JVIflJ;{l0>qf)AdO#T zZTre|k1!~6K)bM(8U@740VxUGlCD_uu9aFYq(E}Ir590f;}$pb3Yn7gvi^p-O7-@o 
zu%)lFC6a&$3PQ9V@)ew5^V}TK6bP-B69ODYklH{>P?Ur=*yrv8Y}4rB*cRAQq(Jo- z=dzR=mA(RkOAd@BStslT9Cve!?X@p~Zh7K$z_~hs4s!-N6CEQ<86!A2$vMyoJ7DtK zseRgDN~zUP`O?VrzTQhk;04)i@KJ0s$|pL#N^xwd2~k!(l6eei8{<6s(bEq0j)B@} zLRY0ZJF9BX=RuhfO*eL(ZDKR;IF^>RXeCPkDOf!AI}GjPN2H=7U^IlRwgyS>E0#J{ zR13Ia!WAs3TBN$JPUnBl>bmtYMJ>ZFxZ5sjOOI;Ca5fr`AZT~hX3w`G((kK3Zi4z$ z9u1$Y01%J>8w_(h^2ePPEe)uLB*hN8*vKRdB%Qx_+M&A?zR+#QR~RH=N=k5|agtN# zjjP%stt#fLUED`Fjq#PK)f(zv8EKR~d|1}!l^e2k*Ope^lBDg+brX}w^YX6) zZXhiPl7%gxp{RfWNjrg^v-9Orc&6`fvs-&d-&A$H63baZ)O*{j0CL|X;}jKglIzHI z`7b`?b*)VH={=fOQVxXR9f3H>_-3?Hq$*VEci;P5oo1MpO(Vk`q<$dVF@-*L#F&rdN94aTF(KSt z?7OAnuv|)8)0LG4cSa7w$2i`jy={Xlh>{~b;Yv=D<81^Wkas7LNh$a9rAmGqi&mN% zw`xIijFG(0f2E`Fmkrr3&dXElWyz77br5NT__Opy{j+H)jj9jkrScw+h zB-%8W{dACCB_Uc;6YQr4na=$-tLvt=moZCRwB7@%Fxy6=3Ul4k2Xult=g%|<&6&Hz z=!`JU!Rz{-KS;@1&t_UDbtyxpp6-L@F<@n8v#t`6 zZF<13qFE@4WiNFjNh(MQI~5XrU0N1}+oNWs`n@@?(XoWbZ9vRMQD*nwn zv}1fy3Qzw4dfK9pc093OC}*W9Lq6(CMmb`Lk)&r}eCv-sRqDL4ULJmQMcZQ}04cQ` zYz2Bw`6)j-Pfujg7nt(Rd!%POeCxUA117s0Z;B#M%DhlJk;=J48QYa1V0Gn+A|{4& z4U^*9e=6$o5BifI`Dy&CtCZ}Ouwen*{*04%nBJyT@?Tg`O7Ng{9(5V3du%)$ZED5{ z!n%XEIvn(eYe~3Gnv{UFN?FL~g0uW+ZYA3B5b299K~W(nMs?~W;0zx+>eF#-5VFh} z3uZ5|DZ4CZS0xOhl`kp57%I*Q&fNKCwBJGYcoxaGJCd7i4$EF*yscm% zvI$xnE5RTWr%?c_PH~E>;T{lYhixvg#P+GTE5xQ7RCzI(QP850N)w}I))my_Vb7gu zb~`QM+YAU7Z&yO&wuLyfD^ixQoMV;;nH>H!qo$_U7ff{6T%TK&ZlZ^V#=IGx?R~fE zt!@q>)w3OyrD-H&q~scYYDR#NHXBnGo*uX`?`Z7SJb-MHK08&rw(=4HUZ4jvMM59! zK2)aK1F6rBYGml^WPyW&Ot6(LKw#rM=!@Ho=IyqHyCL9!b_GV5h~zuPy$&Fuz(xXv zM&md#NQo(=rA9uYmeg<;sAHc)Q21fDntDWsfJ2fL4`!2)oD6g7YEU6%@oM<|96wL> znZ|7y-wD^^O@iTEWvrwe?0Hmm@om|s%r=^iwWT_gr?beOSRHHLAbG!~l-1U3h)|Ny zRASo8VQb%3R5_8h2jFV5Oh>2NpsGw)jIxy#ttly4PIduoDc}ufy#!xH{9mg?2s+ZTBl1*%39Yg=O1TWV-saXsL0DGbHK{ z9E9t5>BCA_F{xuw>XK@Ir5-8Zi*2@5;u0+rt~Yqex7=D~(v__X8CEf!#y39f^q_tg z^o0+G_)bl$gCex8d3IARwjOX3GLu{cw(wDI+9d{`MI2vf$ z<>}4nH^fAb6&IUo8A4YfZFWvLqC4tuIqn?n4*qzrHF`SDq}S=}+lh$0m@)N07Z5B! 
z&5DH&gjdpuaFB7!r@!hpTQt?S)K^`2WwaoveR7aDtk&f!;Xc(IqSFoywoKNXhPU1BVOnrV9o}PBH*k~UrR}^@x}JfTtN_KE@Wj#;v`<%wqJOm z`+8WI)t%U}p~O*r#}slxi2!GOU;uW>@~@liyhCxYp{TCH>^mm=G^O^|g#_}(_^++z zHF6P_sCOi-T27wPp{V1UwZar6$ zeH+_dl@dEs7O?w8Nzs5o9P*RMXRg#Y#9RdZu%$64vIC?Ktyu#(+#GY-iX-siZc3QD zPZQ2^-5Jy&i3>s;Te(kt!>3A|2vOUi$>)lJao~%)umkULHLhexu7#Nris82Jf;lU2 zd^S19O1I}+YK{>E_l`?Va4WaQZH!*gC(^?(2H;LoEvk=cyiejweVNHN=G3%I!>K?? z14{HK8@4$BNc;v)?-vdp?Vpi8_lr%@l{TPIuu|c{Mz=Agu#gp}2PBcli`tVNWpjPBx44~YQo2K{Pf)%*3tLxkj8)MlkNf^HDm9!`b>|8R*yDeQ z_)(mM_m$zA9k$x-@+88W0l86;ZF{tRr`8r$J0~=hsAB{kV2ZaB9#Wcd*4yk~ARr9t zPDaG^KMKs}@Bkft^1P?3qURC;X_z4GzfriyH61YKs`}LA$Z?ig?y{Da3YC$ClaY`y z&>orJkhhTYO)|G-++YS=@4UG$gyAJ1YCZ$_)hegF-PThvm_qv!O0wn(fJ%9JjPm4a zWtwC;?oTY*tIVrgvDw?QS6~0fxnON zqWdE+HN;F8RNSS;Qk{;ox{|CE909Th6{ASp=g8v?eQPxBmkxZk&QIGXWuHx`bxNKM zF>YpeF=z&3a$pRidDi(B%e&JYvZDOlxUH~+>Om|iAcX_3144<(Bd9qXsn{+(WUxWA zIQVjNdQhT*P60W<&gCf^1LP|Gpri%7w?P17eev_ERm)Nh!eoTBmRL__(}BUc!6ze_ z=9f-W5CnU^dNK{82LDG>S$>*=80|3m}M>LZfa%apcWj#&VT{P0By*7`DflNxqT&X zek+DcDp^TcIzVUzg`FwJGI7k-8oE7hn!gb;2|M+{8*qzTJKa~O(-BaKN)3jiEgciVqG?eo%ujhat{k-7dwN+DS_G&V?MH8{==09JIA^#eG{d}sxBh_^=`m}0il7L=Bo3&Pcn$t5QPIUgZd>6f~A zgm)b;_Uflvlc>Li+3)O!O4F01Y%9#c8SS7$r0D@)SujGw;!s z+k+qv5Q4AEH5thWLa^vldEdT*pU#M$_-{y1-;kskEUVdA*!b7IqEAutqAnnf@wew* zcn(|9&(b0$OO9kkvb=?%o=JL9&t#LG zfB`C5Kh+y=Oz{(oMe<>jYi;l)EH>tj{^~fv#y=X_xH8)Ps>cG^4mm`pof4%wf;IN) zzyKcvCj;PmS48n}eK}ZaaY5nA)2+9+Uyz|H5Nkk6mZZ2>vXoL5HW?Wh@uJpAu_TR8d-7X}yc18|-mcphsb9^>$F!vyu`k(;wCm+B$^o?4F{hRn2pyzsK21tD5Ia ze~+?(Ex@(l4OpnM+gtG#(j6=;oP~mabvJgnK(!?*pDl>VQPPzTo*BvMhjZ;gPJ6kd-Mz8k9QF$N52C?EEdk z?A(ydc5hPVHkisxWT{Un86>&OC`bWY^$8tn_C?+-&MIL^k0v@D0I;{(C=UVEIr85J z2OTRO{{SfnvpAcMT8g*5PT6RP5!(@~E;@3e5;rHh2TWk(fnGI@rohpGWu}mj3Y!kA0Bce4R^)m3Z&zph;el&Ivz&e^)bj5N6!O{lsgEd{3pl1h}Ub52Pk;Z+aHdQ^{Cea9Vj!Kq7H ziBi4f?@_2KJ<-MV{)PuA-BG9bhNhiPHyM!!Gw;)5>4kkMw);iQNL~^~3w_d0j(T|4 zu!7fdW-NAIlH`=@L2X)GPl}3$*d0mo@!mbL@XTQIs!&4Ckd%OPj%4TGM!t-UMsGKn z^3QrKcVGGTpWK_Ju_Vzo)dI{2K__(-gg#=>*lRZ_3C)8fy8~M^rOya 
zj`(FMb*qKtma&{R`j@0yjH`n5sjqzKL0psYyM= zY&%720BR#)*A! z>Y3KpZUTnIAw)UhxXuk5LgG2DUZUTo@U*y1#fIV&5iYi0btHmFZAB|m6Q?6%;xU6+ zJ29WO%}X-Z8K)-2?R7AmNrMkrmykvvJn`BExq@?{Rsa@xc98Z*O z=24?Ql{gZ3N@fQe0~it^WDmg9ULd*fh2BI-R@pEmG}0VHksFdDtm^;=5t0+BJM4Lb zUp2LUk=iLwB|3_UJ90Ux4c<|&Q-~g8W4D!cSmM(wGagvr;J%f#+U{l}6Cdm(p|?w- zJZT|L45v=4D3;3b263^;#{AousdmR?m$gS*{{UScB0xQ#KvqydLD;KqWS=_u@*V6n z=&02uuldCx&j`e1h%%D#TK%YBujKCWWwq6ZB zpH*F_QD#VxG3EKTC2aOSA$oUWWllF0B}GdwsI(L?a8g2f0y!LyjZ9~R?DOX?MBW-@ zm8&uvn9@5pfEXoR#RZd-wtL%;fQJr8y2-h`#3@#Yl5Oisq$T96wp>yaOJ!eaK{+@X z8yuagQ@)Vyyf-7Xa@lLSvZm9i_%c-Ecu>ejR4_6Ia4X$LzfWDRr=~LCJ*MLD1i=Qt z+6OOcKV+a}?c>YN zsTIduEhH@}n1F%^A@)eysN7a9R+g^|VbX7$^fu3xrE|2*W5hC;Xl^3W_Q>CEsst_^ zTt}XD>AKSB8kEus6rtBqQj~Y+u^j!?O>np#8-B}i?=;a_YDNbp+}xab5)Z)D+?$UK zKaWzG2^kj-e_*-ACs_LSP zg}>zcWZvmpPr7T1wPQO`s>s9Kre2xPk$f*(OnaO3p%_D@v2p0muv*1N2#a z7aki+ZVhtakr~`2vPw57S9^Y2b=(>?XSI7Q2c9ZQH2@@pbLH%gU&6D#hl%xaP^$V~ zAY+{ezvA5XRQB&#uqDnOfpBIrqPnFX*+6P#XDz8Y=Y;o28|5e8NBjc|)Z$FMLqeZ! zc9*i^NDQL~0VyXt=TSRz#`qKo#kR>3p0-@xX_m}c2xdVGE7u)FZgY*ovOm0QP@g^m zEe>3R&N7!fy^sKEBx3_?;P`c}U&Q`kaJGwPPwYUEj*h^NtD#H4U~`BKoSb+2D&Y=E znx@vB4aY&;YCYmPVO^Lp9h*lf!iiQg2|Z7U-mOvGYEP^s4D>aUC<#A0!eZ&7+fy`goKfIm5_1;|NzVt35&T10dH*jR)%JJ5V1Xv&`Z*z{rk%(^xE#27Bi>>0tub$}hO_6R{yK z*=|E=-w7S)$)wHS0rzRyB<~;Q3ULEaBqS9MTclM} zPcsNTHr^)s?8PO`4}tXyN`skFl1`!s9s?Z?YD2fxy9wDYw{L3WAbPyHv71nF-CK|W zB_kvaMIaSpe+r;89aYVs(CpfAwV_S|077)~AntNN9y``;GiqOpziqzjZ9NM$51LRp zoAZIzXVcMa+#7#4yDhFW<~uNBDYtu6WUQ1Ok&Iy^9f|TjG;w^kBJUld%#zFOI3AAb zMy(3~jltPN1cE#dO3An~ZN}=vr=;4%D1$;{tO7exrwdj*3bzN$?r8r2g=M%}Q!ltq z{laTydhe+3t7*qQ&#H^{6oj+~X@-I!@#tm^G71jvIOC?V%`BPb~mo4oW-x zLGb5ZvsC6Vw=?`luTDyewH;O5PRbUzN9-OeN;D~v1(MRw9QV*eUn%BmlA6IOV0G#%DLxiB9R0r zLSQW=D(uscdK_0%jU2Rt{ZU<9r(~^z3b)b61s)~iiI=&KI&F|0dqW{)A;;2A2_6Z? zdhd@4)p#xdGo6${2xr;zQT_w-su(iT%6?sM_29?t<#;31&d2ijQ! 
zspnlr;4FtKb1Nx35ssjamEvMyWyo(XGlzX33uzWd`QQ zP#^Cb+#iQJn#880x2dO+vE*rmZrIcT8QV4DV@F!TyAYNA{BFfPpwDB4WRc^mAbbT(;kdAEoINVZc6BYZ;+EfaTWMZSl;f5& zjC9(IXF!2)jHZ+lq=W4xT#ZsmWi~aLQJ87=Qqe(4Atf8+kWW10ezm5ysqUZ|<+m(a zeLZDSr_)sV&L58+uK z6G(PUcW!Hay>(dIQL{A~ywIWric159K%s>q0g7va2iHP@q5*%TNa|733j3zQ*eStXfV0oNYddi31YYgKg8mS(@{T+d2f22Xacj|6E6n;%u{zq?LS z5DI4kmAVBQE(3r6@b-fmvJ4D9vYypYA>eeTA#cEvSM2%6nXXm0nlf+BzH$<@$krQM z%-=obSg=?=h6 zQil13NAaRtEpG*F=j(`8QJlQHhXTgcr^?-21*2W%KfGItiTO4s8~QZN*>@$?{>JA~ z_18pu*u9-!tdeW#t8dp$o>eg5D*k@B%VLSOU^Vy9^(QR`7-n}bb>GX{Lzl)yWLL*! zo-(iV7KyT#{B`}6(0XR{3&Nou9f1t&9ZToJ5@KVtvJ;#)GkCef3I6wYujd zD|FITN37*T{xF!uNBz|xkLmZm79{Z1D#aN7yN^fVSgGlL-*eE?wVk}c?s{trw(|}@`O!5`vi^L zbW)QDArtL>tDDGUU&IWWF|Rw=!zQM`At<#U+WFDtWS9>%oxChfLhT)5ddA%Tp#rW( zrV{OvxwhyOyX|~m@P+0oS~sH_KId3zt2nx1{Cq=SqG*%sB4t)bO1l15YYyEYm+}qR z+(3!@uBxru8&7|zqsx%LoAwsw_T{I_-tTu*ndvvrtCqhOt{wVqyDmW>2g^xDa9tpX z$?V#jJuUW44X9Tz&25%@>F|)X0F#oezjlKadS`qhbmLqiGC4)RwaWn`frX_LsQP0t z0~vG`mP%7GY;jOQ6flh9^?JmTI0W@abliiyj7zmD+B)?=RR{0-{@>p8VR6* z83;vAwejEM&K{txr0n2$xgMZ+@`v8qpSUuR243bom>$B62$dM^eom%Qe~ZWnC%5=( z(`FVd`wtNOCU!8v-fkd^Y-`=>`OEZO7gp>`3|(Q8=GOt}AzSU-7-0!qhMV(Dxixa@ z=J$$9AyNISjb&uHQ!uf;?<%@wk%^A_=1u3HsH|F7$Cn<}-XXikyF;}RSFPkxo2D>mMwLY{H&}3`sLlmmex$@D$L4NI@% zxL*_*ypEFyO%|xFF2?Ax00bAR{~F_ezUDlfJr8$Z7fM!<`BocjKo+u;P|7vx7Ce+r|)V>LX{_-%*3_kK_OsnN%|nMVCL2$t{{ug1bA zdOOOK(gXSJPt8pneAVdNA|=S+9P zacMo9Mgq-3UEQAqvqZy8RSH859hQ#|a&F(Yide60Wg;YGza-G8R7fN)=v{F#V%mC# zZzT6A+Hug@lV3T`o#1__#_=AfudFHjh%l(cEAe~b#gBgC^y0kpQ?SX?5^8PFvo;P@ z<8M=6KN(UfVLJC~KSve9vOR|@X6mncG&TDT(^K`myg@v7gU$2!eSP!PeZP9vwNt{f z&<440DaWrkH69|YN8_`f`{%8YBZ<7oghmK+mv+{`ZyX;2KM;Rs5dNCU`ffHXb;N}1 zMNnd?{b#sL;+A5KF>pAPT>WhUOK@TFkbzF>oa$Agw$1rr?g`D&4}~2k@!ax>-Jmdh zQLn452y&Vj=ElkvYSyaPF zqD&l$>Nx`8_PZL~WKY8Y#Xwq&px@gp6KOAskY6}0oS6AEA6q`0`YwTF#FAA@)zww6 zXssUfly3#Ctp#ve)?}6kVME_@_Z`2i+j^1fFsR|ESR@I%b5q8->VjFE*sz1XQniR9 z65YZ?!t>>wCq@jyXlOKZPnj@tDbTb|8^Y#8|_~3RGp6CO>jOL67MnJNGgjH1p>^awbaKSvhI!!gU 
z_pK4cqB`$)I-xux#yDRGf&EltxS>f@6tAw6NHNonK3)GWtdHb8$Mo)~7S|YtXDKFc zX(HEVQj*bYfon}kfQr*@_aUMsX_TOYuXm-P8%3itw?xhuqc*Z&GQE@o8&d%buO(=# z$U~+ePUL$Y#NWPrG(}Cl1j*i)x zqr6rxGbUn!*G^!CBY!pbF7>=p{XG0KrW;bRP!;&}%Ptd|oMMEB1M&Rf%%AoctHg_p zuW@|qYn(!BRt%`lD?#i=sP*;_iT>Ih$;T-vEby|(1zUHme?NZ+wzZlFspNYX0*{yz zTA%?Jr?v!kdeAXYbyI&LK7HTm6&J^W_IgYah=Zk2PQDqwxxFdvjDWvXb2STgM_e6Fc?FWd9kj=lD} zdf8tAYz&+E{2io(TqAeu=C@=za4*M2iE0=r2>+E&SNQVn&($vtn#fn*$s+=WkH@-~ zQRkCe!rww0-+h(QgF52Q;Xmn@5EAx!w(jSn(Jq-f&sCE$E2$s8_b#T@1z?VlGVpyQ z=A-_FHacwCj~3*T$qCLU%MVM8u~B#=_@$+L9L zNCq!2G@xUYGM&!3-xY3PEmVmWQ{}t;+n;UzMvoeGI`0ZUI35?#I4il?0keN=Q*_eL<# zaU~5z$unpS==cxvLHekBT3d8=soB-&i3AEXtBKZ8vda4J9tgy%IZN}>t7cCX9^p>^ z*977F!!|yW3B!>>x5$J&X0KDd zh#!d{Gm}uv-$^}uc35CQzp8!@b^0BLuvWB7X{r13rv5Oi5H3*({=HM{%2xmUyf#>d z>*Y6->RtmR{j*wamu&Vxr@UbRrrT18>8YVsS<`|!PGWi!^!o{|=5D&&W;-Z`wXML3 zoJ?RWr#IUE$tmr0=*NDS0qFq8>i7ow=add_Kl@H5f&0HwLt(j6D)>ixz@GQTeJ_?6 zKh0A|&1J}h2p~JN`%_ZSt~BGF5EJX~yKQnsqH(82U8iXr2_+R5R;+2yW}XL(^J@sd z59O<$v5}F+6vP~$m$*bX<~2u>$%3W* zRJ@VA%wA_$gnIXNw`zZY&&yMBYMDlX-&0@w+mZCv^{Q5eD!yY$JQ)pC^P;xaWg{%q ze>4h9tuXVRE!*4bCOFyiNY~HA<%Ftfo(i>h6Knk5*cN}M*Rmg3xVVf^tse=;hs2h* zhpDMHMLcTbQT;qgKtQYu#{%cevrVuDz`F@8pWGeMHS}-@!z45Tr)JO=?lu{gU}Zo9 z4s^W=Wh&klp^!JVc7u*5gV;B}Zz(Kf8tw}TrXk69Nym}~aL`9cUUGSt;`gCG+qjkB zHxXRJebP>b?1-)S>5sJ*7}iwKv5GW$p6Q|j592<#*Prc*G=_@w+aE?b_}0e2ip6*R z)gu%}>O;PwK8*(Nn#*p5%babz+u{(^@oy5$x!=c4?Tp{c7mQotU&3*np zD59nwk#Kr5Qi!rA$m&v)3HzAf7Mrai+eFcZQ5rUy2*Y_pD0?Sva`pT%_| zFGX~*<8^4|d40O65WeNo6wF?TSbJZR>r(uVwRkp;>tbC++u2{3m|%>b$-0^G;{88> zS`&pL9c}x}OR=`$^lyFWw(3jbY57M55|x$dB0QI*cu!c)TUrlrVnaYNSsE309#*F| zKMwb_>)XR>6H&V;$Jnz77uId^_+++9_bjo;7a46=`&#HMa?c=g^tLErgvOM?6qt4< z#BZ2U?WMWTU$W}?Z7-#_G=2vH^KZjf*g`$8Vlbc1es0q293+!yuGi6w@7biUk1jE) zUt%TuqO5|XF|6|tAnNfZJpJtvSsI2&?Qq`^;^zI9fOF6^FbjdGzV&7=8T2y^vipv3 zCsa;wu0oo)y$N+9A}8Bn3QX{q3R4No5Hf#6KoyRaH|_d{kzNbpb0je3iN+??*Ritc z%yLNK-e9{5l~Ah>mbUM6GJr(0vCNhoOSxZ&Xb4yn%0#(9?TevN)w3h<{;q^FXu$6U4@RJJbYBWecT0d$vtASL@5(a*DG&su^+$wX3E>FBr%s 
z%v(>Ts)gWyH3Ho*ScSF|1BJ|=hp{TGwlXPJ=M*cUGsyCU)A97#e8R6I$2A`$*zeJ( z2+@B4g|3P&!g?PsG7le~Fk1)V&r%h`&3t#V?d9#QLS0W6Oc!1cL>~5&-+3W}i|ePB zNSb4s1A4^jDo&2`!0rYebh6J_=fMtj5N%fqGjsaWfa$+6&y_6ReLX<6Ylb^fIef!eavS1Hq_P%gflha_FI3`p! z5dHSUVaBxW??j0;Hv4~wtkLRq9bO8!!t_Y>Z*aXG)> ziY))~Zm+4_MI_wci{!~A`*HMg8V3vhYN`ReuPO7X3OPYwJdrTrKLDxmTyDGXGV6__ zF1mGWWFE0htwQ9KE0k`=NW=R5R5VvPmv30Vp`>@L1DOYw^u(ifh6(CVs37tgzJOI^ zv03?@;R9U}_5%ELsHYv_dr$UTZTNx~6+-K=%+T|#5vO=slbE~FGWs31)L3>yJMK{) zVtw;`=!8@pm0HSmka~w=A2z_tj<`Drfwt_{Q5p+^BoR#vOHcL?umP3#2oFeNjVt92)4f7YW57?LcR9Y-xp1dGh&)a^obq1F2>0Ck4m%|(^JEKUWR zT_0ulW?k!;iC$SzUYOXY3JJJxX7u!zr8ltd3|EkcQQqT@o&8$cJ1fGB(UUkQ1so9~FN|K)I)I*xVofOM!L;9+_y}*cShqEQ25!!&xREu}^!JOG!lN)jD(i z9UtfrsR+kT$oM;UdKuS7q_sIEZ3`!R`5Q*dRYs0l6pnY3gL&-@-I>u1b8sZ?RhcmF zKf<#VRj|?JY(#AItEFId9SLy)8v8!Pc|hlrPXln}CJ_i>u`&}YVIIrX2~Wl!JmFeW zg{uY5{c##gq8F;ZpL&?J^ig$t#nmSrDuVkEe8d%Ls#NIyF{am%R@GdkhqOO9myM!L zk>ndFbi3@8nsdz?fMoB}_Sgz}J3#sUXN%P%I=5+RjyDQ&49M~+l*jS7D90QN z{*lpaqKng;Tkh;JUA$J!x0+>t=Cf}m=X})jr{$NknLM{~2k4=3pHbZ!Q~P#SRu&d1 zEvB^qL#ztHUxj34AVuxFL+z)r^O?YPWh!%q(5m)xq*D%ai$h@2$1>Q`^FV?8oH~{} zwoS#SOe=qKqx5P0^L{WsB~X36<=^tMKJl{x2Ne$tNheV>7 z{>S>Ya10!Y^i8ADfwW$_6Hjd;(OqYB`OZ}`>s?5re=s-#!%CkLGL{=3U*?e%DE|C= z6HP_7ZM5ey4^KZde~*U4!?EcjSw5=}9?LE;O(#Y5;n~QBA}2rXLO;pR;4+)H~my2YSAKN%5eb%uW3pU5d|Xri8BSTtxFrZ3_tji zyX(+7&VL_!n8ePjJ%r61zD>GH_h=$|GC_vPgL;{-cAQRP`~-Hc^^@>1{X@0qs^lnIoh0xGma#w8TJzPq1W&D!Ig<{* zlyRPSa(3;%=~uaq+T~3r^7YU3%-TqP9tkdS5lL^I4vC-t(;d-(EpVm+k+jPXlsxz) zoJ0`lezlQ)d*x!YMHNFDEE_>Z(!s1B-q#<&<^;*@=DtGj;@ccM;xn4JqS!W^Ks|4! 
z@%*M~f*u(eBSPnXl3)}~m98~6F+Gn}%$|c=e66Vt6rLu@N>TkYN4U`u!U_F+|Eb z+X=X+;@7IYPmy5wx9vUioY6nJ4;QenjpzIfa|hHY%AJ~HqaW~KF=W+*D+%Qu>(b+gh+V zmKL%+ah<~JH@?z}d2)4rTqY-Gu@74Z%;;N3N?+XW;fL_?(y_PvAV4}%(5t4jl}D{p z`B}ez0EV7_0A>EkemCgM+cL^MGR<@flOvo*o+KD7!QXu;hfBWRc-d;a94T5k)D+XE zN2EEj+P9P;)w?6+O36W=3PPXiX~cmYlVX3^`z&1T;BajHMq9z#bmKcBL5ib`isgHB zUp&{R)P=FdV0^Xxsfx~Dt-YJX$vqjW?$ zhwnrudwpPr)K18YdC-v13ZN|78psb_ev0@&GJN>z^yNZq(q-1J%4^Jbe-A~>mLs`q z5VC1ISxWCjChbzL7Zd71b#AEk3}FTO_Go|vy*rRk5G$$zNly?(^xU_;budm(e`0+* z1t8HbpZO0!r}JKW0_zRs{on2Wk;a_425t1MsLK!T$k=ar4~>y%Blh?_21WT6e}Xhe=eGC1?o0TQXPCPPrw)EuZZ~tH)h7s9`jun` z5A1Wg7^1lchp>vB7@QfnAn9a-BYe|}+hI}n{p4ad&p?OQMTHLsZ0q;gCOuoPb9HMB z<(Rq5uz1y9vNuPi0r&xy-`Q<-rVI;Sm$l-_pxE%Q?@PXJ`i8kHcp@gL{{bv~)Ko8j z#@Jr&)(-(|U#!O2m?1Px2Jv?4mUa&VpVQ>6mCh_6QdBDI_ndS`a81!RrDU(B9QjgEu}V5 zkYypWec(aJOYnvE(i$!2huvKWFO@=%o!H6WhV2i0$`%Ta`8g1nRvDjjlzA6=Dq5dzXEW(HwM_~WVwt&mC@r|2G_QAOVuJaMoSw-05yH4UJR6Xj0cdH&`wkmj#ssl@YWdN_H zbN))t%Eka;7W$)Q-K`Ilk0IUv$|4h4TVC*kiwz{C06Rl7tfgZL*(`}NKYZZpREqPa_jJjtV80(tCp*a%^O3$1V zcxC3eu^(8vPd<7VsvmP>%eXJRWK&sojdj@}oHQ;wSRUlAC6=^N~(uDUW;P`IU3Em9EA(S~BN;f3Iof?{=`SUx8<{Z-q315~zgW?FM?8AK#L z_Ieu2-Vd>CVZ3Q_Gd^r!PiC(UkDa&3h4mfiy9z^_72bzqc?bCmO@4Yrfxa0uDCFDG zJ{^!bTV5`Z*V@cFKP|@e_W+z{RFNO*fWFH!^sM zHO>v2#P5=f-nWJm^n6;&l*DgJq2rOfsz~SO-$3#Z}5Lc3;OAfSylXkGm%wX2a^ zYBe9y2yrFQi$Ts~0Q}#@-iEV*sdA6ym!k6x7wk{Fa8|~W=W>OolYb20bd30E`h|Qd zUjx&a-K+Xdn1pCIw5=4Q@^5g7I)m)v<@8nPNQMbcVwU6Bx_!*$>fYuRce1C9hnVDK zdzOAAX!*Oh40h{*uiiO3a(zb*SkVg=T_;Z5Oze!#g!k)4_fx~g`bp9;P6}c8?_)fn zUkchr8f)1j`8seH@0@5^>jj5HFYq@>fxzyW)Z0fG# zFG$yV&2qC zNCJ%vMREvgfjt{Zw7%S8eq3zd)IbMI*YD%dkA=oXuw}rnzF-k54>~!_ji6!~~E>?Q=5pQ3Y zxx8pgZ4T%zHb6y|R8|AqsX%)y{22JMAU(!^2BrcLBA3O+oax7~JHug#>(8%4psU3y zRzXaDclm!^P|vUul#B62;4P%^X5qTVj>~O2ZBz*-RQ;3{>s3?p>i1as`nZ39eA~a0 z|My53ei{*W^NP5~MSi_;yyJk4`fi;^=YaaxYs3V)Rye=NW`;y%(vQGgmRHYWYMy`! 
zhvnI)CU}i~bpUq=L4TTBLGcx>;$U%I^_l(&loFW?kOu~RU-k_}i3-qtqOG4d>$MlO zXGAzDaDzNR0PF)Y3^A6(aVi1zS&N>p_^}027z8@*cmwuk73T*(44^P0IizIoT%k&~ zPnd82gDmcthn0|w_O$Ld48l|zo+uOW-1qc#M|m;t7&AJ@;8!cU$i1|W!b$OD8Bo{| zad;aRqp(>0i;w;Ec_H$A;=7)qw1Jbm^?I&2ro=hFS2%MofeSVn0k=1p}(w`!Ua+Ps20s-pYiE%J&8r9^~cMnML9H^D7Yvw!*X(L(B3IF`qE3A~DHdy~#~R{_#hl+9C@M%QYQcWRcdo)yW~{ zf}g3X(pg?9H!XZRf#9!fw^3NQkNqT_YIGHCXY`xa7DbzUKI_j=axEpS=iLv3 zVt^O^6iSg%R3j7{`>Zn=2iRejFJanYxN~aH?-AFN7o0S~2=~EHC`j$FhwM@mI{zd; zFoExPeKH153yYiQyuR~qtf7W2wcnjJ!Zh*w+}T`DC3~6v2vx?@HWzn-@_RmlT{b5L zwF$V(>$dA$GUxu2*B)XfRuq8WR+&Ls>9=OPfAzIfAB}vEm z2hjDLA_F!Re&%Dg)mcf5IoOS{&Mr|%2R`Id5Tzru{PwW~FP3-$ZM=Uo{rQ=ATC)WD1L6#XVdJ@K@z8+3&4 z+rvU7AMv?eqvNg^{{XC=zx}9(H$SVS67se=)SEDzO}dVmsTveCQn&EZI4D-2-)bi= zvY4#0UAdOO3*Hd(km@AY)P|2fY@@~713hx7Q#(>!TI@8*+E_zaGE|GO{LAj-Y43OT zLktvCL?o=ic9OBrGcFIta9rz}vc5Pv&S$?E>Wn6R<$%l`Bs2nkEcgeg$S3C6sx~Mz zWs9m*t)C%Gvpqm+RVN8tZv@)B9t|Bq<*>UXCdJgBb{XcaGLHo8?K#}4Ip zgnhhOo=M8T-F%FM(+MEZ$eU=4hP1q*lqt(r{ia6eMB94aRF$_qUtvUNfOVS6wV^K{5eF}%h z62DZU>A@^4pTl=`cQjq49fefui8no^GN8$oqz!Z8b@+YJx)rIk?;!+D(y`a1T3qHk z7(gbvXcCpT0&`&=nfr*3pAD6*5OTJ_swDEMdU^=;$eL&}(=%ei%cb-+GIJTDb|oz8 z=`3bAZwNq=VFb(XrCd*c$@Xth7I{Q0-(m}sI6YS>cPTZKS(B51kcg$%XyF<^U5~Ec zBcUiT+sIAhyII`++3}>9shlsUWwxBPFh96hje({U2ARxew8mb^&uA0Vdt?x9HWZ7G zvyC5znUSyV90O~mEg;Mg6;DfWx7T`qja-G&D~+q(Vr1AN{ns=mY*u|od>!6LFc5ob z*lR-r-@5P2I#cC)X<6!~U0mGIF+_R}8`L?Tyc$w&Y65fDkaxx7jaJ6iocnlX0r7;bIA%yfv2-~90f>t7^fQ#k$QRzF z3!rS60&CZqt^v;9i+M$9ykNS zfx@jpbp*VS^EI=+1YRZ)Qcn1J?B}Ju^SV>5@>m8n%XnP8F}AafoKovijZ=e*kJF?R z8?mk5Uj75fH50q74!9_Wk^BmJsgfZYEkhz77@HTuNM|R|WtE~S%J41%h9%jfS<6Qx z4SAoq`e)v&aMffl3!xPD{7JqkCpMv{&=}Em_q%a%nwKgtIgTwGWvI7^^ERHt=I5M8 zV1rFBQ`2d`1hv1)`z8?nG3R4fAwFo9CEY=NR`g(^tFTs|Of>UTZc_Y@dO|(ylSElAdNesfRvs%!HN-P<(Icrec;SXjwMgQp zXL5_e-lXwl1iat95^$9W4Acl=Dd!LqAxi66SFdpF959RD$g!(Evw`bFCbH$^@K3gN zYW-I^P=;wkO>5c0NH5hGN}&{M@7riJuMcfoR&^i9-HbXU@F4K8}Ojy$s< zb}KgUj$L_XvJRy8njE$+E}{^>M|eY8+i`Ip971ZeicwneQyqm#@F>^rzx#X_eabnq 
z??jBmrs{iE4ZqbU914uJ?KkuATgfR=VW~NynR;!oGRwR4n}b!)8p|tGOI@3 zFc7^w_Q^3v%0lygYmIq!b)5OWP3-yalRro1#9CleSqG>#e4pgsnsk-Y$xD`aI1i$JYCE$B>T zIhmA{eZ}GH$x48~dkM*X5((B(oagYiN>p&dA=lHZ!fW*xBk4l_0K5AR{{T;>?*p3> z+DkZBqhLcAny*xrXDI&f`g9dHo6pXInWvM;5nrC8A1eXyA*YxG+_X!zIlh*-3mA zJhHV0>YPl#>MzsUy`&i@JAcYOatI+?X1oybyLFvm>DaX%$9(c_tez_(O&L)R8WRP5 z;~>2IX?VG^I7;{;c!S<}R?oOgjMj|Y&|Wf>rSBi0rZt3giaDAonJk(We0f3UnC*R5 zN8DfC;rSb?$J1V&?Z_*irMy?+;yDT|76{Y*XzD?Zu45KX>tQK2aNZJ1h&9m`j||WY z7>)I4cBqw&ra~57!(UjTTZ>-_n>jnP9KqjG$n5W?w42BWF<@&}*1mpM+T3ggi;Q3! z$Sx)^B2YEFSjt9V#V56nJ%=~1CB?7JLdLYo_>j63H+%TGqTRHJ6z5I#rmEy4HF(K>~teX@#Q9)QGtYdPbtlLf{yRGQlP+X=Xo$2)nEmQ;^Srwdr6@QQunVm54_ zcJHFo75vEozNaSNXijbIcu<9D3KL0(5391;dZCK+-%999KeKyW(=-|v)XpKd@-5j4 zY!p-{V!X66O~wslaec|po@hi;Q8Vz#yz69$M2m!fb)9B@pl8(+`%Y`eNR}!0U4{Kz z)u~>0@b+nia(!|U^CA-nL@-7$MkM#BUaPd$ih{R*#z#Cfc&?om_a)EMn6q0)WVjO< zjh#ztHq(z+Gfp%1KPB9{4M(&6IC>K<7z>@V2=qwp3~A5jr>+k-xvFPzSo40k@FTjM zV1K)Sp7XIc)Z7idam--_u>_ZyHDicAIpJ^hk~){PYBnNu{a!l(f_R8<_h9Rx_cTF zKi+RFN+xG147f{9q1_dyaT*2WNAP&$lWK#OJaBWPR+7RzM8L&ur zMO3q41-#$uAf!-AA7A&Va2^(L36%hiXH==a<%Ct;zB`X%n5T#Ztz5^{V8~7tGN9CbEG!|Xh%6>3d8BVD>SV3$GgRVxV znVP1Hse9t2q1%PfiE=DMO$?jxY%JkX0;g%QtDHkA^~WazGJT7R%lzUl5`x%Wq2bsW zJ`jU*%unWxBBI4Ep*(C9OZow4JAhIbgSX)`s&)KhgIfw`r^56dX8>6WJyYLPz@CDR znD1}``E#!|2=bGN*ftM$tM9174FH#CI2lviDgLBAr7E-dkq5iE;ogRnKKCM_XuBlh zevflPdz(z*B9F_-P@@l9fcA4!&Nz^kMmcx5d)h#88Vnyy3e+q^wNc*Ql-59)%HsBd zq=8LAf$}}caIpxmmk#{{UgtAG0fK9HCAUnFYAKYftjJW}B^^l-T;xi7z^p!mIWPuk z)GoipY8qbA9_dP(G$;I;xpNLSmNl|qn(%Wsj&7!!c^{D`qSEcw5OpexAX|B ziqynQ1x|LQ_&Utm`m#%FeMB*^;t6Kx^`<~8FqLv$>~)WQakzm#tV%76{PQ&v;g2FL z(}Sr|m(_2Fv4af6IQpwq1C>o1DShJFFOosZ$JLQZs-fF9gNNzN-HwJ|imeG*{f!K& ztAbgXu96e$c{25in!1@kmUR;>7?P=GrSR}1_7Cvb47^%2OLeoZ!*@`ypS0s9Y_V9Z zBc^AcDD(W(8oZ+TS-X@)(Hs%_kvEj)Z@&cApX?O9IwMi$d9c$w;?p`~n4Z_RV*%u|L4Sjz7jEv=ki<9uk@e6kI~KQO&Gn!U+%_ zwC9zYsX#LgB7M`Yj0gCPsk)Bc z8v90_yg*`1)4GA8*d@0NsPMjV+{BenrhE__DG$~6R3}Sv%NB>io7Yn=cwNZx2$I)C zT1L10T|WkTx(G;3(A;D>H&bdv*$nOl4{oE}G;dgY%Q?$)rxCV0dqN 
z-S|F{%v*n$;3~&od_tfu=6xbLix=V$;O@$IfMhcHP+AC^=vo=~^MJin>0HBZG5Z~aA1AyGnoJkL<6VLURi zyfeF$w#;EWjwA3w&OrI~rL9h_yP9w{qMiddVt`#LZ!R{J?KxSUVZv4;1jDN))>x0{ zOqyY_rpHi0xyPkU$cVpOq?OXt!_mD|{opEPgu-5h9@O`$-G7&nWglr4EXK>6{WToJ zyKbY$1^^k3xy^-#(KTFpJ|fvd<($i+}8|35N={orq-%Z$)9+nQ4}V??I4 zb5{GPjSmEjkXQ*`|5EY~Fq)PBNKhN^Q7NYF{ttlsDgD3-aSu7%{V~tCm%uKt;f|{} zkzWjT#0bG5%Q$g#QsISUzZp|LyHIpLn7+S2Pp*E~>v*-lR_Dck9@FcHN8$I!ld(_s zZ>%W!={MD=u7Th;(dbkcdFy@K>_X~+NE{JQ%jFNSzetCy5`&+ger{Jhuq=yfHuw(u z4I6v6ZM2+Bsen}@)hO`@mo)i~{|0VBdM@1ywA!0>f{D-cFI5u%0TAx1na8bl$N#;I z<^0ESla%Ax%A4IYETo4mb;d7mMb@hE1vXDt42X6j#=`UM#ig9ZYhWZdsTTbbSu07k zif^p*kE5gOsm)tRt8cOW!`Dao;2-CS-Vd!|{9W!~Prt6?h^w{G}Hm!Ly8NAYI{4vhNOE6Bwv>N_@ouvAOF0n&bc0cgC5dG27 z^(w7Rwrbvkcf(}|_=CEs^hxggd+SPhyq)5MqG#kkD6GZ9i$46<*6#oOuP=>n;jx@d?s1** zKkG<{yRW(D&~V(0yDMF;;p6$Ac9Xot-gCn}x24EQm`E1?v;oku=8~hsX&1ng6UtUK zN?f;@&=i({`p=I4Am*id4(~ZV3gw(qoV?`b=RJES%FN=@3)mHt3;HSF?32zG08R#- zP^S7b-f`}${%03%z5rvz5%HY}l3mIFCI1KbGfJFCM7I7zl^UgbqDq?kzWehFdZ&dy zBvZWExlqLBdW#S3|74qqSQ?l%y5Q8O$((_#6>2pfWs8XVTsoG$3RZ`D6V0CGFz@^W z#PMXE0VCx90roN<&q(F}bViKC;;xS?u@P_n0XY3`tUFKe=-F5tRmuGRH2h3y&HwMX zHTqwcmGxGLEwoktL*9|Nr*hRY9)cF?Il({2F>^u~!p>53?^B!p4ZXlMLkP5HE2*hJdfiAOo}oi%b^ zz9Ou|s^y$Png79*O7vs?3e`jl`~|Jfr)dP+Byk-D^OgDJ6#^{n{ybJ)`|bInvT}FT z58v9qtH9ZMN+@MvU;IJeKRZ94;V46Ir~`0Lq4>Wz=>L;pi2V{(agEBFnH{rS>-s{3 z*ZGS0UkZP9eU`%v!r=f!m+FTlP?U8Bo@hUXs@!d;iT`&j|91vFzX|Aj&tJe#P3W&p z?v3P}ii*K5CM?xfar*lEzr*H#55V%5&}ETJd-kloL?zb3$)>Nye$}>O|ARMbKh3BM z!JaUzL)~?8URPHMBNJObJ+}S+-|YLphv-x=jeDu$KR{>;p~&Auzmu=S;{O%H|DVN> z3v6po|EY8ldi*6&Z}>l(nVYik58(FmS{ZO_P_xXf)TR8D^KZ3(E`y>hL}4O5KiU=R zq~JgQQBFv!amTjiy-M|hhfu(>(SKzY)cqo8vE32 zj3%Xrz<7xR8^mR>s0VOj!#}%c*SPOq&k)kVK`>*lGs-rr-q)RP|K|^#y|{_!flO)r zT7Z^YvOG{-y?JJh;%{4UA$w~0qLUpm3lV5|)URs&_CIWItNA|wkH=1ibkKDgkZOd> z!OY=kOsCD;Gn#>Djg|d+ZmC=J)^>OKKPR;<9LG0w!AV4TYb;rec?^%$r+MX4fAl^K zR`GQ0BVhB~VCE)xdcyC&PRvf!=sy67A-Mj}VZBoVMap_=L^uIuGYB-M31K41YjP|P zeUe=%5PUL4+p)mbrC4QeSuisgDlr4V 
RV>U9Te=C@D3jH_xe*k!3KAL~79JiB77h*p5fvE$5d{$r4jB^}1q~eo0|Ooj3mX$18x(IgabO@Q08kVVFcgrt0RTP#1ORLr2;g4{5(*pw z6b$-ZiwEomQ2Fm#P;d}12uP^6H2^%21&R!Y3;=)xAE&tah3Vx*bftKea}m9C^RGEY>R%bvms~s6sLKZ69n7;VhvS6{_o_!WVG6UVBn?<^_rjmHs~9u zHOHSACS^IFVoo>i&28&PETG~V9TEZnz!e#6Wt`&vX$nF#E?i-OxT&8Q9f3b!%r7uv z=X%k%UV?=BLk6dEl!O@A{pc)b%kF;?{_$Jv*mbvw2k7oc&Ecz}A^zWLjt_fV|EPc= zG0gvq2U3^vx6$7M6!G8Ue+r0DO#cGG8O@k(`-9K49K!*Km8SC8He>|o?8m$C_@*V; zJ06yT%#jDD%dMR&BkvrmM%@kjs`a$b@0HSK>(Ud|&ud4Yc#;)tquiODqWMV52fT8H zf1VCO@f}vP>{bu+FY=3em#k+286f>QB@cwLs^tR-lXRM8J?}n1qNBXZV>v?&IV3O2 z5UC(p65^)3trZ=(FDU3YN}7{iv+84(eu%hG^<5&(q%vNXyx$qv>)e&#`Qzb+k+|JD z%bQk}3bl7%0A+USfl9SpcV;uE$Np~al>`0s=@|e3w6;d4YF6W-$=$NW5oi{>+@^Z< z0_k&eaq-aFEx2%Zz)58~c`&W??n9mZWGRDA|E_wX`Qp6}0+=_*u2Na8vl;ne1^|E+ zvy^xdCK@6v{m`6t@K^{`KItjF-N&@jArL@*w^v&{(Rif$u2DUH_ZJ^;xjLXs#InKd zhy4)%0O2ux)^~ifztpy*DI3mZ%63RL_mSP9h3C76cULKp4;ow|LXY5=`fYYH>GQio zAHGG&41Rj=LI40B!P`1Uu=p~Gp?W&~d_pLC>6`jB?-PFpgF<)4QykEk(5Yx@Xo#Qj z-5Qu+NsF*kIt>!P&900I0U(seZcxs#KIFur8aiYFFv3{BVOII|bdF>h>;sKK4srF) zd`#z-+lAh_fYOg@HEhe>^xqTC)&QW)I=8yxm-ZeSEsw7*M*wW4Re7furnV8L#CPQ zyZPymwE<_z@j&(W>D0?4Apn4Q%~j>LZ1vmOS_=I;9YBCRoK|{mj<)Kj*9C}L>eRUA zUB@|Y{q*Y4t_)Pyw7rCj3lxR;$8vld2i6CM_~s=uza_mOczgv`0RSZS*@J7#InH+C zk8bv*+yb!`s4YBQ9&TN=KxKdEm~Qm(m3j5)xmWC0U}qr|f-P`gXgmsXY2L{ILbsyn zMG3;3?)gWjAsa%){JGJV+wYjWyKX%|?LV&k?6ITSUFm1wGpByC^oh=Zqk4kl&ocv{ z{*+{*bNk8SUXqpNucsA^`Q*rj<>7^|?Q}AyDrXXpK7&J-?abZ!Fzp4Z|5*O^(_Ep56AbfPl@o zB+5-Ww4Hg&82~`k=X&(eRMo8#b7HgS&)IZcMKRz0l4=}}*~dK{l#aO>esE^(V@bBa zZ_qVv-}CNsu?^W5Zwpr^tp~s}(Q#-;V|&dFf0&&0nU(d!has}l3WF})B7{0z!6E|R z>}NNtN0|)804LH(nTZl& zV4t88pVs!;31jwb|7ZaKqj8PmPt4aKliAFGY%QJJX8?e0zDjv> z>+*K3sq9_|l)r`0;Y`Fsle2}z2xyyn)YsA$D|BVaIQ#yl1fcLQ(0S{5!?x&{7raLV zG;psUn=;G83IVVu8`cU*_J}@Juqr?W#$@YUC-nyYgAV$iw<;)SbGD&F1Pk!274K(d zcBxM8KIHgWXFqrO8AA0FJN@tP9c>DyQb4ZQ2h-_IE$xLOCzm~76#!PkTv+qa(K@wP zaO8~&>=Dor-nxRXZUx8e902xU!`dLp&Vg*kx5^OOS4Pak$~e2|;9UOg{C7>yUC 
zbzSskU39Z|R{%}S)?f4soH}Q#i@e%tD*#ZEZ@Kz9dvU_K6Ak4l;JiXqKVhnP@3UZ0k7L+%+#|YtWJTq^-%Ntx&D|;|nHfr=R!_X?s(pKaCP#hWjxU z>TqZ*YdC)@hz(|ruw-Yjw1n65M+-QPYA+BdI~|#Ne}dl=+jUK8wy#@-=bBe0_4&rf zx~}Dqr^J6$@5OsG$%G65K!Sq&6$k$nQ-Xj3<6%ftOcZn`G-4qP5_V)}HYieZ7BW^P zBVdRL4GcX&AVA%21@>ef&{P@lW}+L*_N%)@t$&_^0)LMH`$a;~t)wptiY%pD z2gjAtHU4+~OV&FDkLI+Ihb67WI8QXg)H;#xj=AZA*2kr!ex$LsfAnUdt6|GF_dCn9 zUUIh0`rm6b2R?3`qm6KH85CsU|F@!IOhlqioym~oqMzETRRF5^fYoA*mV_C8XoR#f zJ#AW_q1+JW8^9lLF_wd`c4Ordv>~)OktUVTZI(*Xzdn`Hf2_;SL4Bmyb5XF}nf$ln zxG3E%Zp|A2#t-V-F7Un@GZXvXgoumvw%0^TL z_y0nr`}gFW?87RDDjs&MVb-D+Gn!z2_Qwm=QYq)h&aWL^ofKpoa1&IegoI}Kr2!{jKfJJV#)+*TxpKb&bPaDk7xG0cS9$rrXG2h=n!EurWMPsC z?8;L4vn5Lt;OqB3PqCZ*v_wd2_z0sYQ3rcN(JpfrJ0~V&g|$ zbeV)hOd$v^dyEi~A!U>yOpm+#h&q(RR|g}EN+yOu*A~&2T1?4rfG^u#drTV-VLVDD z0V`rS7#=H1EAI7tZ4HGEQSy`-f*j%)D#zm&%-oWM>-`Dxo4vw14*ZYw;5)X)_%J5< z10};(W(mAsiipJeV$$OIezc|&HKZf#Mzi*E=om|=MB79YsO+(=X*=9@RCCV`y8b8` zBU|Zk&rOWDPyfjh+DSK;&uI0 zV0$)3BQ-HkrwPBvm&(g`s$+N3 zaahUdVXYna?h?i?cSd&amSs-oz!TEprX6DyHW42YQErKgP|>W7Z*eGn;fUgr*1TA# zyP{@=S=&OkC)3^laV9qGDqdHvr=ImE;ST8Gfxj2j4NHpSQnREvP3gaGo*neDc!ZNt zTagh5*Je_GA$T_aG4xk)W1P7!F~2zXw36h^566gTlhd>dtDJ=-V3I&*EdE<**V+xy z=;+_%G@^^3W8vt%PjXKqg zx*P8D6CNs9l9C>$Q)vb4VDh5YMxQ2r2G`!~Fl7uko6~4Pj6C9;yp>pCJjD-0ljw2? 
zinVgwXcE+wd))A#Wn|Nk{l!uIDdYpkFxFs<3{et?WJ(`!tPN!>83*Pn8bJ<;!ch}Z zD1tdnVeO>Rz?24RS-1&ea_&qPf{Fdo?9s|>xQ2`^$H^@*@vWd^g`A`WG^@v5P0_A& zi2Z|R$i%TKcrvC2VN5&0{A)b+&}&|DL{hYw?js&NG2&FLwVwP=0#aTM((X-2 zoM7V&oo$iXVP%I@3*%Myco9-!9$7M0wn_3k?hI(y(yrUgVK7l#a>jk8hy+mc+bB`O zJw)bd|g_0y6al^w5x>i+jLqo|qDKL&WKXie_Qy1+lUWvAf@Nfnb zvxkZZIS^J^(~Fx2UYTEHX)$n>XoAL=SHy3#Vx*q#j4=pm;gp5ob4wds#S&IB_m}st zsC!3PLIsDIchC1_64t}?S`s|{@ZUao14Lj`{q&)IqM+U6I{f|y*x~2lGc^!KAWIm+ zPhnD!L>};}l=)>re<{Hv`T89X;!z3hwbjYa>u1SE9kfhyBE7*JtMt$ac|FOLjf>xN z%}+A0SIy9w_N9^+$LT2_JlzJ@H-OYa{a)y=fFT@w;hX0Bxt*cEuM^< zEsxMR`Yd03ax1`#l5y#`gX;>FTqym{QQ6rBwvU?>ikZmUjp;mF>^Vs8fo@=%`kM^< zl`~>z@pn1f_}w8AF6|H;me%Ws?d(%X=r@4YciVsh=SEY-lQ%%9TJWIhg-Kqw#`JoQ z+l;Y2Y8juljr&(@qN|Q+HbpiojzvUCYx)+{6F#iOr#3RYh;sh6F`gw@EDTi~F9aL7 zQVnH|#Chp&dDqnD*EpHb>i_VXPa@Y!ua(k~^*9`2(@ahkhO>=U{Piac1qx*FW-~_Q4e= zA&@-Fz1V#Nl$QI)ZXT=4DWD&#@1o2kZ)WT6`9hfJM-#u-bDX{RRL&z`@g9 zR5veyUnGVNBwsKvV~ji~ke`SNj@tbF3w~E6c#?q2OvtuqbyMwO+1L3gjxcPRJm<#g zw+}uD5`T01a^)xNeu}mC9C*IOIQjrVg6QSq{svgfI-_H+fcX&+-Kp_WIrFP_wRVhl z%t-^mq6N1Fw`MLuvvw9OC_OzJpO-|ky2KpQar>n66!4gIo^WQMa`{m;VI9As+nzD` z0k^~HbJf@w*-x=8Q985F_;6sX>qM|aZO<2#LziJg z*Hb^3GL}8LH0s0)*#BNH8=Z!@b85@>q&(mep4WQCYR?d>5%ZFOKfG|p&?uWQc6~Wa z60s8aF=!1R*b6wB_)t#F9K2_7yFH72S1K8g47g~}7-lp_xRO-v+byl4*ZB5zLdpX~ zJ?^kh-2!%pxH$FVBacgM;vQCl&ZSkymJcplfjTy5L}@cOI{0uhF0%MKo+^=%n=l(* zV6J{7oAtdJX9_F&AM!DGXr>ssVqyC!Ls{LLY4LlfgT=)ej0-vCytN&51nCWbJ7?xDL)vSVB!{laK&3D8GJ-fPZUEViG9D=xA^ zCtPdPDIAT1CCt_JwHvG(jpXBJXc`Wb6E*Eiv$g-yzeoN{j~z>!CX3D`1E1-exy7A+ z$|$9+{PNx7_Fr(r~vnGQ1T;vC8`e@&L^njEUH8{g_T6aCj;YU^qY2Zc=% zy%!ld=v{b~%_2v?(Ar&O5z%@WtqvM=8Z!PIfwelMc6r76sm;X!zu3PnF0=OW-8XiY zeyAR`Dm$oL3SBcrWf!&c`Ot&uQla?)h*WORT4JQ_M{H_5W5yjCx1MFltN$z6H!>fH zjfF<({;KMlh+Nyc$3@PsYlej0U7=!Y@q2;ox%fgeBDzo+zi1XyLGGqaRz7AkN}y^x zg}D4Nx(`N{70h)TGSxPm%%9T4TALqs^j4c*+wQ0o3Vjo)@xu;?s*LmF<4H_Or?|LS zA8yW>I8rY{e>h)7p(&p@o|>@x$g*uToSt<(^D9~(CnE8c^BNvu*8E}o^V3;VXBWMi zGl#Oq&v!=BUB**oXNnimC>++n{z~SeJLRp!;LoJrKZwHh@h0pC(C`VF5g%H;*40{l 
zdtIH{RV7Nl+8WmQ%>#(6)+?{}+rvzI2_3UYi?|0b22>@;gbJd>Hpqhbv=wjKCn-5T zj(_H|99fGd<JH2Il>*XSfSyW3%p&+53lmZ^17 z_w76VHh*vQfPF*c81K+_KPMt@D2Yg!JZ$uX1ENqEYz#uNo_!w&me%e(&Lv*9owwoF zSv3rG-IMb23Sf9L_#tM-)Yi>tuIa~u^XcPp{l}ml7kOdP5uM$>Gm|4rSHpa>=s5Mx zC-DNL_Q{LT56h|v>m9qoRYTKw+2JwxKl+=c&mld~stZ58XmnR6wR(Yqg}`QjVRlqQ zW(1F`MgLHvuR1_(8u%usuiPxbp`w%X0-4HQbO^09n6&1xZ|y!BO^;hn`_IH^v0Bh5 zBi0dkSCv8yXGoa5muKXDA`zO+mz2WfX5#8#8Sb$ga&?v;Qis4-pm%^T&qgb4$XX|5 zqeO1oaupNEd4W>bQ2r52H^ki_1P=Q^)pnLMH5WLv^8Gcm0+%O&11!{^$)0}=t?0n9 z6*#p1gdk-#&J$5mC4m^tRQaySj*|47l}xIEoL`1q1lW}9Y#uXf9d>X3?9F6{(wYYWH#DqQ}e8yjo+u& zP;SsZt=6*$km%HSLg^Z*geQSbZvYNXotR$M1E}+hoXRYkA7r)3ikV32_hPCdq2EZV z^IXf1)#tHf%jQ$q?1|juXHtDTwXPdp`gkO~qoJO>XXUR2WISg10$2-=4J}`#*U&+j zArX3n0uNfP0Kgkf^9E3R1LV(Mx^dK`j$;>+^aOwNInh6CJmI z32A%-Q0+T5=k7gMW6nS4CikuRCtkJcDqi_%!dbs6j&0isjy}kNvw^(q2uJOxY9)|REOfb;gG`ohC9 zlh?q5rGC$};6jzqP28R;Oi3cz&A$_)A*~DFd$FoZ6=?z4q8`wh0(qw;X>Zoa5Ia7e zA%`&Uoz{0Plnp5UVmCukoX6a|Q;^LlTX1!~zjoBTkBUpH`XhR=9n;8eDI6Hww!<)G zQPuG10`=c%*9+J@XlEX?u!IQ|$h;P%CJ>8erK5ZwYS`U+ls zP9cB(`t_$gA#?G)#B+;g#`(;TneItPcKnFk=!N#FF?h*AUZ(bfswFQ$;R9Y*&Ks6X z7rrj;y&h_Yh_p>byfs2N;W$g;C_QD(nI8Vd zcRetKl+skm-v#q+74d=UA~KBYZI8L8X}2-T44TBtfJU7XQ(w3 zEW(8;d}ugeR(Pzj#ZuqWFO6GL*+m{Hk2kM7P%?Z_nPRB;s9+qwr3KFl`1UZZOh5bJ3--K>AU^-X z>Il5PIszotjSUQvR^{G#WJ7~6djy)O7kO|}C&)Q04cMP5?JnwC+K0ZM;&}tb$|bHU zls|DXslfo0`K4r63MV^=01Ml&^N~@l$`L@^jRQh`+KVw782?9o*v(quz1HSGWei; zTL!1)WXffM^@KBGb>7g$*2%CUWi|Vfhnq%oXew7|~-TNtdB(x`d{gCEfn_?Q))o2+RGOA5uQA zEx6rZ)^DfPlbyACF2fJ@54E~8;DoPAinOZEf z;bVi{AGY&k9RW;KR!wqw^8?`|kdcayC=jC3($*CO4 zaiwyVw}32WABKe+(KDVz7maTdVcEq`**+0=c0XKX)P%3&*(Pxp2o*hA-pO+2 zoNA`IP(E7Dg(lXIJCD&(SC){@aqers7A@t~?Q?3vRS_x9Hie8ZA>l+3BxdM#lOmNe zG}&^?UH{Rr2ozu4g=DULm}`)kyg+v3)sr9*6KDI?dke8t6~^dB=fw1gy}q=wwK<=o zzZ?C6Fl|q=8#TLgDmlMDWYZH~TJm)hL>^^cZ8v;5Vb}yqirVKd K|F^BLBWVu8c z$BGK71i_oc6+X(a9s1T+&S%5>q{oBge7{@T^&d>l+y&^E9xB|5)OVT#D9(f))8O#j@i= zg=dXC%mX%4#IpaDb)Oh7;fNb&SgHs5 ziGea=YQV>imIiyaOv1B|BjrX<=`5ruTN4F$hfHGbdw`Q@{Z5z}J^cyRK-EPkNn)EJ 
zOp7$N3nj0cMnw2mWAO{W>KhS*BV_kqsgM_0JC{-iVX$135-UTPu%C?XAu^N}K1|n7 zN=OnJ0>?@@WB>>#C^#en31 z77|f$PRQF?TRP$ZCK1^x#Fmsl0w-0tg)G9!==y+ZcG;OXrIhK(=jud$g@D3qY9`ni5 zcsY8>t8WgYc6ElZG*F=sbk!wl+*HAmGKoS~8qq7!<&KUbZ;?b-mZDEZcqs8E|m`y&5jzB2S>nX9ocQH3u(5d`+ zdQ=#9`t4fHuC0jLJyEuBE~o;aS)kq`#UYuZywHrE6ihy~>fEDGv6(C+g#)trj7EB{ zSe?M=^|&iCe~f)lG{vqFi1btH$rEed-iw zaP^LUZ@TkFy(W^6ppJ#9$msFfA|CKR(G$=Utw_Nj?)rxg#zO4FKNKA-{OqjF({d5X zxC(6+TPxH=Lw1K+`=ojH5mKo$`Yu(~SKVlF9@bSQoxX6S#tapCQ&XubVL@8=9J{IY z$yJp(+fs6CjxN`?kiDz}m}AwnZBk4~2wYgy@A z#`iQ?I?*HBS@0mWPy4dTc^Ww4m<6y^l15k`X+K4!Lg(o{*sw2?HqW?KXJ8wnwn!8sjT33!{)X}fIPk1jD$HX|&UdP- z43tVE`z@m`SyUDbK31wN-!Yq7^O#D;Xn4yYVbFb|7uD&%(1!Er4N&IH0u1?7_RLRT zi$>tN%BPx4CJx7;Wz}QHOMxLhPleX^MUzwmU?Y;VSscwu<*Lpg4XB)+613;(hgKJ=nq$v_TIg~WF#I@+Vd*fN{ zrnn3+90#{1w{A>CYOJo^XcPUuv(8gQQ->|Lq1RZ!n4D&&dAKV@poBk;#&M!hWo9cI z<_0tK7}^#SyerqAZIXq4F8n==2(^cjjD95ICZSbOM6-w^d4!TQeg&<1s? zr(`~4HpV(6xXdA9Bha*!g-s`4akPU>wKwnuVTN1B?kl9FLz+7uVWh8H`inU!Q!ZN* zOsQ60ZlN~`%=1RsZK9V_>itvV*n6t+Jr7m(Hvz?=6T<{j->LFB3-aj!{f(~3>!Ax3 zkv_HY(iQAM8hQJ@;*RO%MJit6Y5n3>=aSGqq!zsxGUP#Khe5{Lo0$($zweRI$`Cz0 zjjl38rS;KEY%s{QhD3%n$Vf~pP}r1lg!gPEe(__sleI-AzTOxK-Dy-m>m_)YuVj0I zNj;CU#_bBFBQ2TpenH`W-> z_=$OL6_Lf7S6$)O$v9b*qj`n!*kX}epyU8FVZUhxjm^+wa&fb6t!VbPf5M2#=Ntk} z@-x(fpN=YhEr=~7A+-Um#ij4TyET+IaI*R}NEw-E;yyk)2EWhQA>#U7wqM0PRcjH_cTfzbG!v|Bq84~&Fpb(PT~YJ=E?&(*6R_v!4xp4GGd>v>4@zo*K!QxU_2JuOcN*>hE(1F7|IgQYW|KlSlww!HT`|i zk-R^wRy4;^l&PF;ddFP07%S~IzQqf;Pvju{NT~H9y_Cv8qO>(<{H2a=I3#(GBwWC_ zCQJZz8W<^Ao{^2|b&G6vnt~{t3y|JQY&NLjmuJ`=&cK|zA7Nn%Q|=N@3+9PFsdH_7 zdAJD|t9{2&8(NeRRXuUrMP%aw_^)urdT|a2) z7)SoX(IC%gn>9^fvcsHBHnW7wB5i3EM32XUfBx$1GSk8l2$e@nBj7yqoHBOZh zYP_~Y4{6l}?IONzRQW!%3YJNNnQ@6{g5B)!mqJ8}(cbA>$PcysNJ#o11_5%4*HrVi zBtIII42AooYoRN-$X5b((Lzu2n?8cvDR5eVSzY=8fT6XnYQYx_G4JofKY#U1OzagQ zE%*ZG2Rcywl=4R5+p|w*(3u1^6FyPd;}zJPM$9ze>Bwds`2Ok(NPw zVv`z%R`I)%)0Vb+wc1|)9qf4itlZ!7OA_(NAU>}@I%$Z6>_@r4h2!)VrZ<4+He$S5 z=61LFWZt0ilw}7ZtPm$u9o|H|{xDix* 
z8I%^$No%>Mf{*nzBJ4wl;%<_cabzyW#nay@bPB={UU8wK?8+Qm81!e`X%xH82YP;OPfgmpA9b9d_@1UxcOn@ROPrT$qtjX89l`o_Ap&or#twOPU$45;5t}wPyzU;b62}K*8r!Jd5 z5e=P_dxVe)Mppic-u=12qX_2>K*MqwRrUs$FJ~!v11OY^<-YI~&=&e-D{DAt+@0#; zsFU5Cgf8rkTv;q(>63@h{)lfl3~=SP!&g}8i-R9VmvOQQ){=mGKAED`4#SB&FRwQV zjrW^~c>@T?qoFi8j4oy;GA7TqsVpH~qav1W(tH%y5sLm6%Uh&!5!ofN*7-}fi+o*5 z=&5X|`3kT~w@@`|i$Ogp86)`nd`aFKBaxV)yBK*5PcDI?6Re&5mY_uPyAX4n@?#xd zGD^zJ0(50N8mxx%N>oUU8T6LsQiYy${WTPiroiuv@4mBV2~duqSuZK5VLDaXEtnp{ zBZ`DwQYva?rRA&o=tMqm0P9!iAG=8S=-orW{W+1$y8Ly*_px`m;)$^&P|VCG3so@8 z9Ul3dF*oAhNL`CLsZ<)$@SGHgjI9Ov(1$3TWE1BtLuupcKItky2nCGH#d_o|ZWG+0 z?P9j-O4fei+Y*k)LIH47CC-#q&(0z2qR7nf{#5N8@K2#l5=^Q-0!Cyq-{z0RGg!*= zceZwCgK;x)6>)DRj-{wJ2=2)ULQwrVkU0}y@q&{V985fOCPrk;W}=F6zN0Fpf}-AkXm4# z+>h}iht78?$9UZ*RCA0e`s@^|?u z4veeeN-)|1DzKVdO`BYNl?2Y?bt(nOp0?4#`-g?W+y)w4`5@3(WMkE1>~_`;8QbDd zNXrm}9{F}R_@4xkjr@P{LDFwAz$ptzZB&AENvCeNi}^eWkY(|S#vqt zL_2JNpov&ZUOpf@aylVfAkiYUT*DZCnvK-0V384%`!Ng;*TWv0Ei$RCg&O|}Ux}af z*WjE#&5I}m?}p@q>C#%Y())!=BtzgINVE@!6Ru3OB15YqzmWHtf|yg|T%wZ9>g!Am zbLu4@HSVvcc0o|KW6uWB7*Z2JLzQR#>O#GE8C8{2E1@y0MQWfCbg_=3^jpqcbelQ3 z&>CEbrkKg{xff|>P2rH+HnB_0sJNSVOqm|@_ajtNp&gb<8zPKj)DuplxO)mZZ^zX* zq=Qq%%HtL^AFiE2DF6oK&^D$urB7e3*x_>_H9b`n_DbJfH}io$mRE~dE%4&wJxhM2 zfx}3sfDX*##J=my=Axqzhn-5KUPu1@w1mB0zoSSV{zYcK2&LaI5l^l4Ab{aFjl@=r zX!CJT$~s1hXMW`=y+)aVFKWDqbRjI4zB_m^w}kh%LdL*UE1QgHn=&2vE+_s71ZS>J z)u8&0anXxOodO0)_1oylA$$dA86UD&K`l4QXTRE$=%u)ENfu#)(((93U(^ zK?@Zbw>~`|PGyP8nX+}bD|gvN&bj2c(1rP=PxI^as5#-EzByBSM#p`+L&}|>F*7QU zs)~Ao^2V!>k-DoP5b#`?CHKv*jv$t|^l9+A5eY+1MRt%09)Mi2qbFlAs*^ECYV=HD z*vE;2OtXe>%#2$zgUUxOR%IQ^gx5MP+e-^v+~(D1HYXL`cpTci&219Xh!tmXHc8der0*P51$C?40-W_DV zjTKv}8^<(RNP&Zy;%-*#Z^b6+Yj>`r+>4JJ39**(-4bWYMrNnJI%>aRW?J4UB8$=2S|eDC0hJ0ideNuLVU_9(1>!Sk zO--*eVaFx6M~lhIwSrWEKtW6$wUnVp;VfC{;tG;cklJWK)u?Pn)-{K>q?IajH)EO< zuBFnx?1X+h*+M7>Wm8%E(5s~yg$9PDiDX@Tf6GxDhd^ZqzXRSRMi(w;h_s$00G)p* zqx+?X91=hFATE5f4#Iri7imSVr^EsE*v?9xgA+MjS+e?8wkHry6kAo!gc-!7zmC0= 
zO_~O|0i|-*_~{UP>^fRTPe7P#pq+=VEnHlDST`f6H#gux3mRfdNm4o9D@|#s9li|6NRg+IS#Eblid8vrnL{ z$%&4b#U9>5*_DN^ww3Ln7?=fk09QzZ`K+H&*+GEhR2^87s=9tYhEaN>ai1Z3AmuNy z5_eh--b5Znga~mCfz70Pl16`?*fi&)c#v7UrOGdKKPO>~cAgs0gMS)Faps9z?>h^; zN(G`jnMW_PWio2!$SEf$9P(R@U9|MT;X(AGnMa1RmkIB@0m_}d<(mXV-PEzqK3HTC z3R^%$cqORn~=>VhvG4op*o3sDleLJsu2; z)xm+jD`Y&P9E$7aO3P8^BQzn3vWqBm~w1jF?&6BfjATs6PeYF%2 z2Un2{760{uqmu>@g8(y5Z5j6B{j1{weMdmgN?M-hvv;A7`z5 zI5CfX*6o89-s|jLbCU$+j4gy>GeIVkS#0@5gG6;m!->e)DNkZJP}E?p@1t}Le( zC-7X~6KtPgLcvZ{ggeOrceEwQsahu^#i2Bw*!r%PeO9Pd94qEZ`k+(?W@&s`h^C_k z3d&KENBbbfA4M6eXiC@8gCTgfK(AQXf-e#B>+^^pfTa?b^*m1x%~Uax?84Jjf`&St zf9jzMS$-gQ79s3e$GuWqdVooH|BO}2vh;;}3ByG09xR8in~WsH?7(F>=qRc~<<$_c zb85Bbm>hmOl`smye5&fv&y0D3E4%3{K#P`fnqvIRlMvtoQ3}j=cnxWNbBcn2PB-F| zDT*@Dxa=v*7VED8>tOdv>){WUT&Y`6b`7Q<{eqHBrF!)_INtz7Uona<-vFuk!{E2w z9|oU3rnWw89(#Q7=evaGexjN!v{0eUq|(zz(Na%XVtFi4kXR?Ojyy}RXc_bTBqfyQ z;?a9VA&(`_F{{g*p0-0|m&E9OMB zqH1(jv_#rwCOLBnRr7veeccOk!t7;Y_<>?p*h&o;$gnLW@8P5`TZj>u2TSHtoE=NH zH+%sl1poP(lRYf?l{pEU!oXJgIx1;)MC5f+BKoUorF!AFT*%1md!GsrPNH7Oyga3u zs`&RG?N6C9l%}0JT*IG>5g8odlJ;%9`D+yCOuvZ9*rPg{-CXbpvf)dYdUGH%s}{QJ zvVl-L=&O%8-%m2JJR&BYmAk9+(q<rubeYI_ttgH*Oe9Im)N!|rqzJHE(iF3?CEZ#9~bJfMrUJ|8xs4~m_Da3 z3qc&r#c6VX@#N18bA__R2L={3!w?h#xW!4QPkimfbw01I{)(hY^&!so2l$ zGi*5iAP%Gfm0Uikw4VrR!pN>wS*}13S#pP@$%FBh?NA74*qCfZbpcjelz`Aa?PiEr z=}2UU=$rHWh>w-Kb|l7ZSrBVW3@z;J;b7iEJ+V8W$4rJ!N?2^tRmd#4@djZ>-%zzt zCt)kVwG<|4vY%U(irxTC=_MaNVf84-v2u6pi^^Vs1~T1KV&cG~JgXp?m8W4TPprqa zw?u@77pBOq1Gq67*wj2c76mG}@*k_NJq4)2V_eW=D=_=&qPLLrZFSC$_^mqGYD}b| zrM~p+$Y5FGXqnr;QVTh}65s3d4^YwFhLtt$2`72}@MRWZUx3d6#xz<04xyv^<@#{_RSp0s>i|0yxAIq?U2hcvoTY_vixsT#XB6y?#mEy6dAmfo}nU+XKdoU*Z?=R@F5GRZ~+taFyXf5G?+F z0==EcCl;in$I51aKGHV&26GG7{t&fAT|@(;O#YX>)rM%T{hu`HANRH&NpyyOv1J~S)}ycHT;<(Omg(9=g@sV`o5;E zLQqazYi#{N7BAmd{Y!OM!tEFntoHdPV_KO&(`JLmaYPi(V}Y==4wfIs%WLa_?ku|@ z07?Wwji6p40)HwdN*AV}45iQ18Jt6jME&P|44%?QR>ar{*gTTmYiz!YVfn4-s;tQE zNF!hYEnrtj(w^Y$xwp=DD?S9RLQ4*as#o&jxculxPA;sVCirZ 
z9E6`BcBoDFvwW%4K7P)iKoP4gXe%%j#~pw?;$Ln@2UMtnqQFdC|Z z4oNKQqtU)D6yu<+XkTPp9pv=fLSLkWIh1Fj!0_#n-ZD&Nv$g7iU=nk+V4c3D$)!$`J?Rc*(R$4b!wnAw zA|HzO(XZO;*#)^}4;K>jxEaE)+8^}Q&AbeN-&P(>+b?tKIJ!-Mpk!YFH1&_4& zZx%{d;;LGKqwH>uV>=;Y;+dCA;JWdj|J(q0UwuY)#8AEme$o4XJTCpr8(A>F0!{x; zB6a4ntDd_57O<~LlPCQ5{NG(_{CD6D@NA-O;`bN)lKY?P$1ayK&q^4Mi5;b2kdq5~Kdd(LQ5v8C6s+DU4DSeeKpENL zDA3rWUYa~U?it3~CE!8MzNG+OR7q#@PDM9lzN7;4wwXg`Ds+adH@KtX=v- zF^x~Nx!npv0TRJOZM#IpXi5ScvI;3Fp}oi{jMXs9PnuZpC84nf?EiD(1xJF_Qcy+c z;HRBu74|wgokQH<5@%G3wefHjpJ3KNA|XUyawN;~ZLm*HUooHfes?Mhx$*{B(qd+c zc5K+jVoSJv?9P{j#%4i zsxXH(EVEySAuo{~ynUXKkLRL&adZe#rFUBMOKNbqiG*7VV*3w*a})Trll=T6uu|R8 z@pKK19ye@KE=fu!nM7!ZS>&HHKVi0IWKqULn9z%H*Fd>N_?o_^ZxJz%Vv73GfH`q` z8oJ)xk7Q0qdCu6;D1H$2Lv&$iGHOhTO1*KhERszg6>s0LUe4v;n0W(?rli~UtcRQ? zJiAoO{+eoI9Ol{U#{c)KpfN2zMBR4%?WazI8qJEulm8TeZ?WN2e~?c*Y!R0BB8jsK z+5Rdi1DXQKg@QY6x6@@UoCUto->tmo$%23UCXlhnG`alIMZE;OhcQA|@-&qCziRoa zpgP)M+im#93GVLhE*p231ox2O5G=6K;2tzM1b33)?ry;e?!jIE%YUvvoVs-$?o5C6 z^~}6Xbyd%->h87X>SU!B6Z2H-Wia$m0U^TMX=z8NP(?My{&O@M`A*?%zf!$pLyEMG zjpF+bh|x4N8Ixp6{o-Fm$@L7A1Zitjbl~n4If9gijwZ9>BSn*g^tfY%H|eS83cCn> z=3v)q*X8wIH=GG5Z}F4fWo(+Ovf49@L(g>6>$B6Y_e}IvyajgA>9L~`Ea}~^udcKB z2vRLVANoC3*9J(A240D(kj_ddPmA;0wFnL*%i0Nd z9c#P6J(G_fOG6p^u(XU4s4$Dnf94Al?H*3U%YkYc%1CUzEah(1m=yjkRG=F%-iv{V7P!lYh!ZWzaj|j~ zPa#sj&&_|~LvAxN14pjrd5j^m*Yk6=oC$_-!qvA%ADT9Afz+B&)3IYOfuiC!4%#wN z*ZtWQRfjO490~FkYG;G%m+bLFa|eCaS)LUY0{+B;hy}DwQB+Y4ilRLw1WG3N?yi>c zDQT`>jCYHU%~~=Thw@nmCF1PsltPn(>r3H8cE#2?)u{N%-%(oa&4Z(`2tM#~YfcU! 
zMAuUrc9(UcZ>UbxzC3;SMcgj@vVKzi;Z3_(XV3Q?PF!>2Pv73oTR7uM3t$gsBrlPF z)k8*B>iu);n8a9=h^bM&?&~Y-oFjXmp(oSrUoP)8bPF3>#i=9Lez%wOf136s22H9& zKZDiC+xBO%Uvht1$ew-SwLA4QBv4U07u=yZ1v|a{^B2u}C(XMI6Y%n%THGm$_nJxd zY|2`*KiDTtdz^$7EZu^uCXCNlfI+N2E0-MgGh|Il6AW?@Q4hhV%0^x^OU#JX4EKtx zm@+3FfXC`i@uM@$<|pv~yny@UXsi4Ik{{c;y6)8tE<$|T+|O*4EaQ*ItPmI?xA_Jg z;%B<-SYzoaEk?3@WT0KL(o$4-IJ)bdKDVH2ty8iDiv`WTKiYJAp$y$XLUD*1$X zP?ENL!@Kw(5H4X8_n{ooz7h{tr|yu^(J@>pFQK&+SexHR&Z55V2uPD&tS`%X=ZO_^kGjYNqXcG=><}U zQMC1LjsDf;i4c{6`Ph%qs5_^yA&c6Bg)9?m0RIV5?)}ro}5*j=DDgBc!rCd$q z9p9i|$B)<5&^oknZ7{WJ-rBgB>bB_!-U}7+B1H=@ojXS5HY}kYNP6rpLb`MbyV$ZZ z9>5#(;`6$CyjX@VxIIk#Aej6-=H{QHJNEEdQ@;7LEE73>c7RLGr-x)QD_<|CqU_Br z;|cYXLDk=rd4(Og53BW>!P<0M=VO-8B8xN(ZAutd-I_- z`F*|=u`@C0ju#7-{@Z-PW>iISuQTRnLvGbKVNNPwe7~D-A4Wc9j|$ug=ZJ`zcC}Ed z68ywuq8hw-?CMuS|q;iNbwg?OX)Li$p|~^NWBpt74Z5**iIX%?S@~a@#r8& zEbsQRATjFwHxU1W`^%GO?Lsdy7j)$6&d8A$ODR;ku$1{Vc&}TNHIP$<(2l-OwGRQH zkc!O^8((GtYx?@lBpUBI?~|umCjFeQ+i>R_e3a$kLpl=U&yk3~B+kXD%lL{Fb`qsSir?Locb8xRl#K|uIl_y&Oe zFTQz&H=O@=p(rJ5r#8AY{s&j{Up@&|3X*7qi(jS_XvewK^x_aB+-7<3&&qP2KVMW{ z#Z-&XeR=?et||fBJzbN4r|w?mfUnjU&t#Wk2B$d%qt8E7Z^vBQ?<&gn34-Y>y&qM` z%TTOs-AG83zB22SY8Cw_8%r?STx0D;?>L#fT>-LQ>dk2xxlF@zh!^m;} zAJdj2UP}?xpDYW3)Yh+Q0n689Oje@9`7b&J$C(w9CbJ~V8f8a6-PGuC{?0oz`~L7) zta+>loqQ=b{mbjX7T~Mrl&tY1CLdl26%*-I(B}TG%h?1%F32lq+Er#q+HDbNh42)(5HMsj)vH?IPflMh^|Xoi-Fe`?D28R(=UjM@SI-`b znU>D_bZD1c`xk2-KC2b{e#nw9zb0ytB*^T+{Z9^xhoFX!OdYTa@fPF|5zTG+BH=03 zJB)@SrI5SZT@4b}^A%=82zRiIX?B8Iy5Ieaj8Cyj7Pn{q^1!=c@l3je{@Sfcc}=VP z%Rj<0>ex~rKK$XQ)7iURJnzF!j^V^I>JR)_^mLSMB=o=v-=|!bx;7u}{4t8knC#rD zbG$RLHD1e>DqQssfaOUIk0WBH<)y=qSJjp#Cysop0{F?>Q2*lG-55qW_^Dq)sBFyU zAPcfjDU2c?#^=0rc4m~}f9Df>fLEOsDW!<+fTbP-ts*uNth-*X6TVZ!h1mETH=1V| zdzoEJ_z4l+2l!dXZ{%$)AVHBNP;)<~?u|9_&)TnrBXE&8t`?w)$N0SsKR^cmWu?-% zW49{vAPjB-G+^dcPTs#`0In-_=;|t%YkQP=Cdrx2(=HF@N^{z=g_ajK?4woHS zwEqQBT&|VfBsS|wIBFtPeu3m27R}+X7vE|3n!-#+5b$lUrCo>zRp8WFy=bGAe`ksg zP2SaI*YSL){P(j?v$In9`WNRWwua*kG?b~Y91D^12K&VldCIYT3#d~SsGmH}y2#HJ 
zXBi{|a7~d7i8OD3vh4MJl0PkoiT$?%l|z3-oup2-yd$zm)^02oK321~ z3A3*5+JMHPW7hB%6zVV_Ci_i*F%=W|$gKz*oPl;FKkjD0`f23hmo~XC0X%6UIc0%% zJf5o4t!eg{6$fp$sz<_3hBH3nnRCK5MtEucEqj7uFf-#o#iE&Sa}hf}leDGVTiuhV zJ=z@$6i%4tooKnE&9-*OIf`RsIjizQ&cY}wLBgMT|0Ch?qdk?`%Pu>1qiRZ$LH-AeZ9Cqk%&46$*7kfB#sPJE~u+xpwZ8z&xk)~C7)jAWfh$Ax9 z9|^}bpk?koMIG3_<};DylV#oGSj4G7&oM8-<$cPS3|LoR)UZEVey zc$3p6{-FZijQbU&X5m3JYpaJqoQ)P zzVZ=VwA?Nx;!kZyt!gvLur&j!f6*Ls@KmlyNdYREiEc1hVvUEpe6|? zNe!7EFdUe-xyZ)64hrvbMnJrBcCg`lXVE?2MDWvPhWVj<&PL-h%c){^ejHuq`qeq~ zr6@s?*?B0bgNu8FX3R@+b7deDco|jDDd8dz=$)+k;)fNz6{~&-=$$+6SqP{OalpyQ z3BDJfmy!CJrRAaUR~2DH7>Vo2`Icv|I>gR{rov-aVy%AIPL;Kz{2fm!^_z8Zj1>uv zr?a+mGTYZjb>PgA*hiP7LbU`GzEp56_VUr4h%@@L>18*uoMA$1p7+D&h^8~@ty~Ht z@p2;l&E#_!IJK03>g#r4&U!aa8WjJb-W!Ion54+v7$y03oCBg=A-nUqQK#@Wy|IoW-VDTV$oboM{wQv1>ZuKwjYR8Jeu{1eq* zw09+1nfBBNVh_LF8HmzgW#!i&{@z*FI@#E0zLX#pTYm96kXFVSvPLfRBTdo|(|A^k zDt>VG0#g|_ke$kp5Gc&D%4dJQ&p}X?8t<6)c!^iMWA=N~NmYIoYql-uvrLh{=rrRT zi5S|QFjql_*1C;qTk{XVwC0T=7X~?ol1)VTG1pQSA$w!`FAf0yK#Nlalq*8b=OzEE z!@~0{M(EA+M9d3}kfHU|C&1Qs<`HrGbB9~`^^1DUi>wf4z^q*3S39$j=AFM^UuEH7 zS}CoitJD2i$J4kaj`}@2PH*bg>MV)IjIWPI`*?05!ehr3^})rXZSQ)?Y;x>#IoFU& zyodh*Tt7uEq@g{&mbQ8U%>Pc-{3{9kufXnqjMn`3s*R@0EBgnFGw=Kr)pVUr{cjyK z2`pz-+On@Z-(52j3ee-xzYjGGPDW6;AWYH_gUlnsVwLOQ&;ql=Gi5aZ3R%`jpz-&H zoqAp~tgnU(jhcTLx0@JY-S~1O-)x#DB~!6hj^7<+xm9_LH_Z$0{TE#MNV?xN+xavj zq3=I@s~SbXo9DLqG#El~qh{qD?ntk(9KN3Gq>Rpfl|~3qLQqM9-gGUz}L2 zPT8U8N-$MMT?F5VWtF=CP$g|WmGu`Bjv#;)M;G$5=J2maVw4Lml7VE+Xv5@jNJPAC zO=|QQqPW2KRRCD6?>R}2ZBG!&qx)o<1yh~I~DWpBns1(Kh zK30o}5vdteJQx^z%;{P12EKo^Ir?_j6=W+ zb%y=-|F;>2u^;d%&etG^hbC#_=P$mWX3I}N__nUTjK1fL7${{-$4u8Ho1zJ=DU=Z` z;Y3#Hm3+^|>yPN49+=+PJm;A=ilvN0Wu>Y_DnskPa>3WP~EP!B;ok=q#zzSmj~O6t@SS*K5PysA|h zjl0I%xMKU6r!EX}TMZ@Ai}bn)k^`BniCSkKqd%%tsW3>3_zxpf2L0a#PrsMg&5~0H zk=tnewRXJ#MNu3L!Uzo87AQPK9$G0oSbgru)t_Y9e{+TUfbbdA9%w3A{r~HR#9e0u=jYo+5T=#$;R z*tjUM>0;Hwfe#>KVIu&n=>-#Iv6J!8lyf{lIv5Qx1n(}9`@+UgM`M%qdHOM#? 
z)T)f^>0!J|8aSX2go>yknF{r+5!4fQa?KN$X$c&%hlK2rwYscd?EeAqdZkVGglj=j zaF^4^cWi@{y0hlslEB(S(7x;RpwuEBfD-=3IP#l$C!-@)2`Iv>I;i^Sy+UvA`9A=~ z$OW}jn-HeDqsr_pIo~rN^AEcC#-MIk{@+c@L>n}Kq!txGJIqYPK!n6ZPLbehZ5=KBJ%)|gIibO=7ot+;kP^CuPjOSU7_JD{q{OH~cSp4FgUwvAHYSjWcfpo9^cm+P0Y>th@&qQY0c3LB2B)OBr{+Fy#}W^ZmJZbJY+gS8MlgK1L+O#(Vu`fVKT+F3QC_l9HNoZV?J1RDxDZ{yoewU=dx0{zK}~M&4}W(-@X=UgnJ&uZC!?PH22r0*B%Fmds zL0C2Qo9zq>Q4B+njUTaINAeVz1er4|xX?z|&54(j+9EK_lIXJZ_lM{(Q77Sl&h0%6 zYp@O*0fEoruN6W4pS*A@K~0j~Sh4wbnOg2d6MPJ1=IJ=8`_Q>3kJkdgyt!iR}F|w*w=-g8dsVagB(=YtCrTJi9a4UMsliEG^Z8PBE8P51KIAr9(&GF-_K*sgeZV#Xw+bAmLB3L9GW+@WVM7)7 zi93?6bof|uy7GW@Pp&UC4X#vwkN&{?z?LOC0(1)MaWnm}lqMBbcYQGnm)la2Js6GW zp!9u#Gu8;4K z2MZ<-yqWr4ysjgL;4W{I=OTK+>k_r-*C5^JY@FF z0LJW?D^7P-(ekw`h}~fq=DoH{HEA1tDR>7$qt3cEKsZX+BvOgOX}GgH9|@4rm(RQ_ zwutWXAywN#DvN(#iTKBI( zhCWc?3f>W|cL4wLYkS%Mn2u*M2Vj@g=ZxNzo{H0}vW6#TMtP&K=~jI6CC@UUhM+5% zxgI?virADVRLcedlB}A*`?O3$Bw0w`9J$VY!+fTTrQIzvM)KMH$I)_i?WdV`)GsgP z^tLG=5(KJvNH{gSCiMb}Jtye~O3`U8_8Yl)*Lq?G@M#MxI#H?I1!Z484^3XU9WIJE zdA@3yIAr?$5+C&>5E%flXPwEDU{X#J+uCO#;y%HV`N2eD4mtNA!fAR*(1ta?z5V(o z)h1w{7zA8EsulLV7Uu5kO4F8t^5lX*u_kNMO+nk#Sg6n>*=v7*p(G~_Bs0|YNzgb? 
z>13HCf?&7sc!)R@FakHBdzu+yPIMQp7uB(|Pj%v)!6(Y^B|#Gh?&L*yx7x$Jh}e?q z!U~G&p&X9s!Kl)h&Z(`_gZ!htSBdd_HEqiF*apxP34FZ<2c5^s5rG2SQQC3^$24khjzUC}jnx{%JsJH179vD01n3PB$yInWr@8>Vb zPe`QRmq@S%*F{P>j4KsoX^wb&E!so@85LSu2MwL2W-Zhng)Yy2OSuZxeK~1KLbX~U zxDsLT`*|21jf{dAeqNK*1U{a7MB|vk(VfLk94$RkkO2Z6eMD2qU@$dhEJ>O?isqrt zeoLKL2W;1m2tA<)OnD430ui@i_DOzvFKag+EAIUdfG>Jr!aM%H90sTNLudZ_i<0hvUW6cp`iwuy&~6N&)U_!5Sv{c_=jkQ5b$EL zbIGK_%SCYS8XlYx1sw57Bzx9ov_Q&t!71M$xacdmcK4?JEh-Ki3>0S;o+c>8=v%wm zmLp;8z*>9zm$!MS?Tk=OV_&NTsPw){|6IF++GM>+5vioaO zpwr8l3x{iqCG%OT%5E)3cbSm z67);1izRnbYjAvm>5h`s7Xu)T-K{$v7}W2tB@bJC{Z-RcnoLjEah!6dx?Q^7acr$L z@=J;7q;`sENW=+hpu}sSV!nMmMe*j773(}bz(qVz6r~OL)c}lsm?Qm(+El0{t)v`s zRSy-DMdBDLZARD$Y?+q<6bE5B=Vtq3I{JUCH3Jw=6=UXQFsaI%=m&iEgw*4;~S5f()XWl>nZvXLA0g_;BD`2TR91=cAsf(3RW# z!K>i-<`>28Ggs4zfxLJwO-)380FkWTHkb--I8?wbhGS1_Q6_*J2bX$(Va$Z#nT*AF z7hKiK==Zy<Vy$mD0(4)9db$%}7s|mD3Eg+4L;Yl;#S`fH%#C&;cj`3s+!(YGQkqWF4mMW^>^Q zW2Ur5Cx8DRF`kB>0Wx+{7%(d4i+qr>trOYSyWerrxtWT^Dq8TDq#BnU&1AJ*h61F` zK=y!5HhvdkuMOtt)@XG(;?#Ho89^Oz;yw?OX1WY^lR}H z$K+ixDy7V3B6#gXy!x_mZoC|pH|iJOT5}X3Gdw-^AAFn^qtkGzAws2L7ho?GCk(CZ z=)n#v=)*LRLy~}w$<>zA=1KLKT2~1*C^EEYWK2=iKp3T_HnjpzW0vt~u-)O!0WwNS QycZ);WT&M6?LUkE4OJF&J^%m! literal 0 HcmV?d00001 diff --git a/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown b/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown index b1b8b67f1b..bbf4fdbd5b 100644 --- a/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown +++ b/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown @@ -118,9 +118,23 @@ In this tutorial we will study the *corner* features, specifically. Code ---- +@add_toggle_cpp This tutorial code's is shown lines below. 
You can also download it from [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp +@end_toggle + +@add_toggle_java +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java) +@include samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java +@end_toggle + +@add_toggle_python +This tutorial code's is shown lines below. You can also download it from +[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py) +@include samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py +@end_toggle Explanation ----------- diff --git a/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp b/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp index 2b8471d35b..894b01ce56 100644 --- a/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp +++ b/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp @@ -13,15 +13,15 @@ using namespace std; /// Global variables Mat src, src_gray; -Mat myHarris_dst; Mat myHarris_copy; Mat Mc; -Mat myShiTomasi_dst; Mat myShiTomasi_copy; +Mat myHarris_dst, myHarris_copy, Mc; +Mat myShiTomasi_dst, myShiTomasi_copy; int myShiTomasi_qualityLevel = 50; int myHarris_qualityLevel = 50; int max_qualityLevel = 100; -double myHarris_minVal; double myHarris_maxVal; -double myShiTomasi_minVal; double myShiTomasi_maxVal; +double myHarris_minVal, myHarris_maxVal; +double myShiTomasi_minVal, myShiTomasi_maxVal; RNG rng(12345); @@ -37,56 +37,54 @@ void myHarris_function( int, void* ); */ int main( int argc, char** argv ) { - /// Load source image and convert it to gray - CommandLineParser parser( argc, argv, "{@input | 
../data/stuff.jpg | input image}" ); - src = imread( parser.get( "@input" ), IMREAD_COLOR ); - if ( src.empty() ) - { - cout << "Could not open or find the image!\n" << endl; - cout << "Usage: " << argv[0] << " " << endl; - return -1; - } - cvtColor( src, src_gray, COLOR_BGR2GRAY ); - - /// Set some parameters - int blockSize = 3; int apertureSize = 3; - - /// My Harris matrix -- Using cornerEigenValsAndVecs - myHarris_dst = Mat::zeros( src_gray.size(), CV_32FC(6) ); - Mc = Mat::zeros( src_gray.size(), CV_32FC1 ); - - cornerEigenValsAndVecs( src_gray, myHarris_dst, blockSize, apertureSize, BORDER_DEFAULT ); - - /* calculate Mc */ - for( int j = 0; j < src_gray.rows; j++ ) - { for( int i = 0; i < src_gray.cols; i++ ) - { - float lambda_1 = myHarris_dst.at(j, i)[0]; - float lambda_2 = myHarris_dst.at(j, i)[1]; - Mc.at(j,i) = lambda_1*lambda_2 - 0.04f*pow( ( lambda_1 + lambda_2 ), 2 ); - } - } - - minMaxLoc( Mc, &myHarris_minVal, &myHarris_maxVal, 0, 0, Mat() ); - - /* Create Window and Trackbar */ - namedWindow( myHarris_window, WINDOW_AUTOSIZE ); - createTrackbar( " Quality Level:", myHarris_window, &myHarris_qualityLevel, max_qualityLevel, myHarris_function ); - myHarris_function( 0, 0 ); - - /// My Shi-Tomasi -- Using cornerMinEigenVal - myShiTomasi_dst = Mat::zeros( src_gray.size(), CV_32FC1 ); - cornerMinEigenVal( src_gray, myShiTomasi_dst, blockSize, apertureSize, BORDER_DEFAULT ); - - minMaxLoc( myShiTomasi_dst, &myShiTomasi_minVal, &myShiTomasi_maxVal, 0, 0, Mat() ); - - /* Create Window and Trackbar */ - namedWindow( myShiTomasi_window, WINDOW_AUTOSIZE ); - createTrackbar( " Quality Level:", myShiTomasi_window, &myShiTomasi_qualityLevel, max_qualityLevel, myShiTomasi_function ); - myShiTomasi_function( 0, 0 ); - - waitKey(0); - return(0); + /// Load source image and convert it to gray + CommandLineParser parser( argc, argv, "{@input | ../data/building.jpg | input image}" ); + src = imread( parser.get( "@input" ) ); + if ( src.empty() ) + { + cout << "Could 
not open or find the image!\n" << endl; + cout << "Usage: " << argv[0] << " " << endl; + return -1; + } + cvtColor( src, src_gray, COLOR_BGR2GRAY ); + + /// Set some parameters + int blockSize = 3, apertureSize = 3; + + /// My Harris matrix -- Using cornerEigenValsAndVecs + cornerEigenValsAndVecs( src_gray, myHarris_dst, blockSize, apertureSize ); + + /* calculate Mc */ + Mc = Mat( src_gray.size(), CV_32FC1 ); + for( int i = 0; i < src_gray.rows; i++ ) + { + for( int j = 0; j < src_gray.cols; j++ ) + { + float lambda_1 = myHarris_dst.at(i, j)[0]; + float lambda_2 = myHarris_dst.at(i, j)[1]; + Mc.at(i, j) = lambda_1*lambda_2 - 0.04f*pow( ( lambda_1 + lambda_2 ), 2 ); + } + } + + minMaxLoc( Mc, &myHarris_minVal, &myHarris_maxVal ); + + /* Create Window and Trackbar */ + namedWindow( myHarris_window ); + createTrackbar( "Quality Level:", myHarris_window, &myHarris_qualityLevel, max_qualityLevel, myHarris_function ); + myHarris_function( 0, 0 ); + + /// My Shi-Tomasi -- Using cornerMinEigenVal + cornerMinEigenVal( src_gray, myShiTomasi_dst, blockSize, apertureSize ); + + minMaxLoc( myShiTomasi_dst, &myShiTomasi_minVal, &myShiTomasi_maxVal ); + + /* Create Window and Trackbar */ + namedWindow( myShiTomasi_window ); + createTrackbar( "Quality Level:", myShiTomasi_window, &myShiTomasi_qualityLevel, max_qualityLevel, myShiTomasi_function ); + myShiTomasi_function( 0, 0 ); + + waitKey(); + return 0; } /** @@ -94,18 +92,20 @@ int main( int argc, char** argv ) */ void myShiTomasi_function( int, void* ) { - myShiTomasi_copy = src.clone(); - - if( myShiTomasi_qualityLevel < 1 ) { myShiTomasi_qualityLevel = 1; } - - for( int j = 0; j < src_gray.rows; j++ ) - { for( int i = 0; i < src_gray.cols; i++ ) - { - if( myShiTomasi_dst.at(j,i) > myShiTomasi_minVal + ( myShiTomasi_maxVal - myShiTomasi_minVal )*myShiTomasi_qualityLevel/max_qualityLevel ) - { circle( myShiTomasi_copy, Point(i,j), 4, Scalar( rng.uniform(0,255), rng.uniform(0,255), rng.uniform(0,255) ), -1, 8, 0 ); } - } - } - 
imshow( myShiTomasi_window, myShiTomasi_copy ); + myShiTomasi_copy = src.clone(); + myShiTomasi_qualityLevel = MAX(myShiTomasi_qualityLevel, 1); + + for( int i = 0; i < src_gray.rows; i++ ) + { + for( int j = 0; j < src_gray.cols; j++ ) + { + if( myShiTomasi_dst.at(i,j) > myShiTomasi_minVal + ( myShiTomasi_maxVal - myShiTomasi_minVal )*myShiTomasi_qualityLevel/max_qualityLevel ) + { + circle( myShiTomasi_copy, Point(j,i), 4, Scalar( rng.uniform(0,256), rng.uniform(0,256), rng.uniform(0,256) ), FILLED ); + } + } + } + imshow( myShiTomasi_window, myShiTomasi_copy ); } /** @@ -113,16 +113,18 @@ void myShiTomasi_function( int, void* ) */ void myHarris_function( int, void* ) { - myHarris_copy = src.clone(); - - if( myHarris_qualityLevel < 1 ) { myHarris_qualityLevel = 1; } - - for( int j = 0; j < src_gray.rows; j++ ) - { for( int i = 0; i < src_gray.cols; i++ ) - { - if( Mc.at(j,i) > myHarris_minVal + ( myHarris_maxVal - myHarris_minVal )*myHarris_qualityLevel/max_qualityLevel ) - { circle( myHarris_copy, Point(i,j), 4, Scalar( rng.uniform(0,255), rng.uniform(0,255), rng.uniform(0,255) ), -1, 8, 0 ); } - } - } - imshow( myHarris_window, myHarris_copy ); + myHarris_copy = src.clone(); + myHarris_qualityLevel = MAX(myHarris_qualityLevel, 1); + + for( int i = 0; i < src_gray.rows; i++ ) + { + for( int j = 0; j < src_gray.cols; j++ ) + { + if( Mc.at(i,j) > myHarris_minVal + ( myHarris_maxVal - myHarris_minVal )*myHarris_qualityLevel/max_qualityLevel ) + { + circle( myHarris_copy, Point(j,i), 4, Scalar( rng.uniform(0,256), rng.uniform(0,256), rng.uniform(0,256) ), FILLED ); + } + } + } + imshow( myHarris_window, myHarris_copy ); } diff --git a/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp b/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp index 2d44eeb4f6..35672706bc 100644 --- a/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp +++ b/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp @@ -27,26 +27,26 @@ void 
cornerHarris_demo( int, void* ); */ int main( int argc, char** argv ) { - /// Load source image and convert it to gray - CommandLineParser parser( argc, argv, "{@input | ../data/building.jpg | input image}" ); - src = imread( parser.get( "@input" ), IMREAD_COLOR ); - if ( src.empty() ) - { - cout << "Could not open or find the image!\n" << endl; - cout << "Usage: " << argv[0] << " " << endl; - return -1; - } - cvtColor( src, src_gray, COLOR_BGR2GRAY ); + /// Load source image and convert it to gray + CommandLineParser parser( argc, argv, "{@input | ../data/building.jpg | input image}" ); + src = imread( parser.get( "@input" ) ); + if ( src.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + cout << "Usage: " << argv[0] << " " << endl; + return -1; + } + cvtColor( src, src_gray, COLOR_BGR2GRAY ); - /// Create a window and a trackbar - namedWindow( source_window, WINDOW_AUTOSIZE ); - createTrackbar( "Threshold: ", source_window, &thresh, max_thresh, cornerHarris_demo ); - imshow( source_window, src ); + /// Create a window and a trackbar + namedWindow( source_window ); + createTrackbar( "Threshold: ", source_window, &thresh, max_thresh, cornerHarris_demo ); + imshow( source_window, src ); - cornerHarris_demo( 0, 0 ); + cornerHarris_demo( 0, 0 ); - waitKey(0); - return(0); + waitKey(); + return 0; } /** @@ -55,33 +55,33 @@ int main( int argc, char** argv ) */ void cornerHarris_demo( int, void* ) { + /// Detector parameters + int blockSize = 2; + int apertureSize = 3; + double k = 0.04; - Mat dst, dst_norm, dst_norm_scaled; - dst = Mat::zeros( src.size(), CV_32FC1 ); + /// Detecting corners + Mat dst = Mat::zeros( src.size(), CV_32FC1 ); + cornerHarris( src_gray, dst, blockSize, apertureSize, k ); - /// Detector parameters - int blockSize = 2; - int apertureSize = 3; - double k = 0.04; + /// Normalizing + Mat dst_norm, dst_norm_scaled; + normalize( dst, dst_norm, 0, 255, NORM_MINMAX, CV_32FC1, Mat() ); + convertScaleAbs( dst_norm, dst_norm_scaled 
); - /// Detecting corners - cornerHarris( src_gray, dst, blockSize, apertureSize, k, BORDER_DEFAULT ); + /// Drawing a circle around corners + for( int i = 0; i < dst_norm.rows ; i++ ) + { + for( int j = 0; j < dst_norm.cols; j++ ) + { + if( (int) dst_norm.at(i,j) > thresh ) + { + circle( dst_norm_scaled, Point(j,i), 5, Scalar(0), 2, 8, 0 ); + } + } + } - /// Normalizing - normalize( dst, dst_norm, 0, 255, NORM_MINMAX, CV_32FC1, Mat() ); - convertScaleAbs( dst_norm, dst_norm_scaled ); - - /// Drawing a circle around corners - for( int j = 0; j < dst_norm.rows ; j++ ) - { for( int i = 0; i < dst_norm.cols; i++ ) - { - if( (int) dst_norm.at(j,i) > thresh ) - { - circle( dst_norm_scaled, Point( i, j ), 5, Scalar(0), 2, 8, 0 ); - } - } - } - /// Showing the result - namedWindow( corners_window, WINDOW_AUTOSIZE ); - imshow( corners_window, dst_norm_scaled ); + /// Showing the result + namedWindow( corners_window ); + imshow( corners_window, dst_norm_scaled ); } diff --git a/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp b/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp index 0addc5503e..853078bada 100644 --- a/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp +++ b/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp @@ -28,29 +28,29 @@ void goodFeaturesToTrack_Demo( int, void* ); */ int main( int argc, char** argv ) { - /// Load source image and convert it to gray - CommandLineParser parser( argc, argv, "{@input | ../data/pic3.png | input image}" ); - src = imread(parser.get( "@input" ), IMREAD_COLOR); - if ( src.empty() ) - { - cout << "Could not open or find the image!\n" << endl; - cout << "Usage: " << argv[0] << " " << endl; - return -1; - } - cvtColor( src, src_gray, COLOR_BGR2GRAY ); + /// Load source image and convert it to gray + CommandLineParser parser( argc, argv, "{@input | ../data/pic3.png | input image}" ); + src = imread( parser.get( "@input" ) ); + if( src.empty() ) + { + cout << "Could not open or 
find the image!\n" << endl; + cout << "Usage: " << argv[0] << " " << endl; + return -1; + } + cvtColor( src, src_gray, COLOR_BGR2GRAY ); - /// Create Window - namedWindow( source_window, WINDOW_AUTOSIZE ); + /// Create Window + namedWindow( source_window ); - /// Create Trackbar to set the number of corners - createTrackbar( "Max corners:", source_window, &maxCorners, maxTrackbar, goodFeaturesToTrack_Demo ); + /// Create Trackbar to set the number of corners + createTrackbar( "Max corners:", source_window, &maxCorners, maxTrackbar, goodFeaturesToTrack_Demo ); - imshow( source_window, src ); + imshow( source_window, src ); - goodFeaturesToTrack_Demo( 0, 0 ); + goodFeaturesToTrack_Demo( 0, 0 ); - waitKey(0); - return(0); + waitKey(); + return 0; } /** @@ -59,52 +59,54 @@ int main( int argc, char** argv ) */ void goodFeaturesToTrack_Demo( int, void* ) { - if( maxCorners < 1 ) { maxCorners = 1; } - - /// Parameters for Shi-Tomasi algorithm - vector corners; - double qualityLevel = 0.01; - double minDistance = 10; - int blockSize = 3, gradiantSize = 3; - bool useHarrisDetector = false; - double k = 0.04; - - /// Copy the source image - Mat copy; - copy = src.clone(); - - /// Apply corner detection - goodFeaturesToTrack( src_gray, - corners, - maxCorners, - qualityLevel, - minDistance, - Mat(), - blockSize, - gradiantSize, - useHarrisDetector, - k ); - - - /// Draw corners detected - cout<<"** Number of corners detected: "< corners; + double qualityLevel = 0.01; + double minDistance = 10; + int blockSize = 3, gradientSize = 3; + bool useHarrisDetector = false; + double k = 0.04; + + /// Copy the source image + Mat copy = src.clone(); + + /// Apply corner detection + goodFeaturesToTrack( src_gray, + corners, + maxCorners, + qualityLevel, + minDistance, + Mat(), + blockSize, + gradientSize, + useHarrisDetector, + k ); + + + /// Draw corners detected + cout << "** Number of corners detected: " << corners.size() << endl; + int radius = 4; + for( size_t i = 0; i < 
corners.size(); i++ ) + { + circle( copy, corners[i], radius, Scalar(rng.uniform(0,255), rng.uniform(0, 256), rng.uniform(0, 256)), FILLED ); + } + + /// Show what you got + namedWindow( source_window ); + imshow( source_window, copy ); + + /// Set the needed parameters to find the refined corners + Size winSize = Size( 5, 5 ); + Size zeroZone = Size( -1, -1 ); + TermCriteria criteria = TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 40, 0.001 ); + + /// Calculate the refined corner locations + cornerSubPix( src_gray, corners, winSize, zeroZone, criteria ); + + /// Write them down + for( size_t i = 0; i < corners.size(); i++ ) + { + cout << " -- Refined Corner [" << i << "] (" << corners[i].x << "," << corners[i].y << ")" << endl; + } } diff --git a/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp b/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp index e72653f41e..022cd456d1 100644 --- a/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp +++ b/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp @@ -29,29 +29,29 @@ void goodFeaturesToTrack_Demo( int, void* ); */ int main( int argc, char** argv ) { - /// Load source image and convert it to gray - CommandLineParser parser( argc, argv, "{@input | ../data/pic3.png | input image}" ); - src = imread( parser.get( "@input" ), IMREAD_COLOR ); - if( src.empty() ) - { - cout << "Could not open or find the image!\n" << endl; - cout << "Usage: " << argv[0] << " " << endl; - return -1; - } - cvtColor( src, src_gray, COLOR_BGR2GRAY ); + /// Load source image and convert it to gray + CommandLineParser parser( argc, argv, "{@input | ../data/pic3.png | input image}" ); + src = imread( parser.get( "@input" ) ); + if( src.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + cout << "Usage: " << argv[0] << " " << endl; + return -1; + } + cvtColor( src, src_gray, COLOR_BGR2GRAY ); - /// Create Window - namedWindow( source_window, 
WINDOW_AUTOSIZE ); + /// Create Window + namedWindow( source_window ); - /// Create Trackbar to set the number of corners - createTrackbar( "Max corners:", source_window, &maxCorners, maxTrackbar, goodFeaturesToTrack_Demo ); + /// Create Trackbar to set the number of corners + createTrackbar( "Max corners:", source_window, &maxCorners, maxTrackbar, goodFeaturesToTrack_Demo ); - imshow( source_window, src ); + imshow( source_window, src ); - goodFeaturesToTrack_Demo( 0, 0 ); + goodFeaturesToTrack_Demo( 0, 0 ); - waitKey(0); - return(0); + waitKey(); + return 0; } /** @@ -60,40 +60,40 @@ int main( int argc, char** argv ) */ void goodFeaturesToTrack_Demo( int, void* ) { - if( maxCorners < 1 ) { maxCorners = 1; } - - /// Parameters for Shi-Tomasi algorithm - vector corners; - double qualityLevel = 0.01; - double minDistance = 10; - int blockSize = 3, gradiantSize = 3; - bool useHarrisDetector = false; - double k = 0.04; - - /// Copy the source image - Mat copy; - copy = src.clone(); - - /// Apply corner detection - goodFeaturesToTrack( src_gray, - corners, - maxCorners, - qualityLevel, - minDistance, - Mat(), - blockSize, - gradiantSize, - useHarrisDetector, - k ); - - - /// Draw corners detected - cout<<"** Number of corners detected: "< corners; + double qualityLevel = 0.01; + double minDistance = 10; + int blockSize = 3, gradientSize = 3; + bool useHarrisDetector = false; + double k = 0.04; + + /// Copy the source image + Mat copy = src.clone(); + + /// Apply corner detection + goodFeaturesToTrack( src_gray, + corners, + maxCorners, + qualityLevel, + minDistance, + Mat(), + blockSize, + gradientSize, + useHarrisDetector, + k ); + + + /// Draw corners detected + cout << "** Number of corners detected: " << corners.size() << endl; + int radius = 4; + for( size_t i = 0; i < corners.size(); i++ ) + { + circle( copy, corners[i], radius, Scalar(rng.uniform(0,255), rng.uniform(0, 256), rng.uniform(0, 256)), FILLED ); + } + + /// Show what you got + namedWindow( 
source_window ); + imshow( source_window, copy ); } diff --git a/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp b/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp new file mode 100755 index 0000000000..3fb34e9f1b --- /dev/null +++ b/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp @@ -0,0 +1,60 @@ +#include +#include "opencv2/core.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/highgui.hpp" +#include "opencv2/features2d.hpp" +#include "opencv2/xfeatures2d.hpp" + +using namespace cv; +using namespace cv::xfeatures2d; +using std::cout; +using std::endl; + +const char* keys = + "{ help h | | Print help message. }" + "{ input1 | ../data/box.png | Path to input image 1. }" + "{ input2 | ../data/box_in_scene.png | Path to input image 2. }"; + +int main( int argc, char* argv[] ) +{ + CommandLineParser parser( argc, argv, keys ); + Mat img1 = imread( parser.get("input1"), IMREAD_GRAYSCALE ); + Mat img2 = imread( parser.get("input2"), IMREAD_GRAYSCALE ); + if ( img1.empty() || img2.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + parser.printMessage(); + return -1; + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + int minHessian = 400; + Ptr detector = SURF::create( minHessian ); + std::vector keypoints1, keypoints2; + Mat descriptors1, descriptors2; + detector->detectAndCompute( img1, noArray(), keypoints1, descriptors1 ); + detector->detectAndCompute( img2, noArray(), keypoints2, descriptors2 ); + + //-- Step 2: Matching descriptor vectors with a brute force matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + Ptr matcher = DescriptorMatcher::create(DescriptorMatcher::BRUTEFORCE); + std::vector< DMatch > matches; + matcher->match( descriptors1, descriptors2, matches ); + + //-- Draw matches + Mat img_matches; + drawMatches( img1, keypoints1, img2, keypoints2, matches, img_matches ); + + 
//-- Show detected matches + imshow("Matches", img_matches ); + + waitKey(); + return 0; +} +#else +int main() +{ + std::cout << "This tutorial code needs the xfeatures2d contrib module to be run." << std::endl; + return 0; +} +#endif diff --git a/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp b/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp new file mode 100755 index 0000000000..ba9494ed2c --- /dev/null +++ b/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp @@ -0,0 +1,46 @@ +#include +#include "opencv2/core.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/highgui.hpp" +#include "opencv2/features2d.hpp" +#include "opencv2/xfeatures2d.hpp" + +using namespace cv; +using namespace cv::xfeatures2d; +using std::cout; +using std::endl; + +int main( int argc, char* argv[] ) +{ + CommandLineParser parser( argc, argv, "{@input | ../data/box.png | input image}" ); + Mat src = imread( parser.get( "@input" ), IMREAD_GRAYSCALE ); + if ( src.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + cout << "Usage: " << argv[0] << " " << endl; + return -1; + } + + //-- Step 1: Detect the keypoints using SURF Detector + int minHessian = 400; + Ptr detector = SURF::create( minHessian ); + std::vector keypoints; + detector->detect( src, keypoints ); + + //-- Draw keypoints + Mat img_keypoints; + drawKeypoints( src, keypoints, img_keypoints ); + + //-- Show detected (drawn) keypoints + imshow("SURF Keypoints", img_keypoints ); + + waitKey(); + return 0; +} +#else +int main() +{ + std::cout << "This tutorial code needs the xfeatures2d contrib module to be run." 
<< std::endl; + return 0; +} +#endif diff --git a/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp b/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp new file mode 100755 index 0000000000..e22155f471 --- /dev/null +++ b/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp @@ -0,0 +1,72 @@ +#include +#include "opencv2/core.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/highgui.hpp" +#include "opencv2/features2d.hpp" +#include "opencv2/xfeatures2d.hpp" + +using namespace cv; +using namespace cv::xfeatures2d; +using std::cout; +using std::endl; + +const char* keys = + "{ help h | | Print help message. }" + "{ input1 | ../data/box.png | Path to input image 1. }" + "{ input2 | ../data/box_in_scene.png | Path to input image 2. }"; + +int main( int argc, char* argv[] ) +{ + CommandLineParser parser( argc, argv, keys ); + Mat img1 = imread( parser.get("input1"), IMREAD_GRAYSCALE ); + Mat img2 = imread( parser.get("input2"), IMREAD_GRAYSCALE ); + if ( img1.empty() || img2.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + parser.printMessage(); + return -1; + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + int minHessian = 400; + Ptr detector = SURF::create( minHessian ); + std::vector keypoints1, keypoints2; + Mat descriptors1, descriptors2; + detector->detectAndCompute( img1, noArray(), keypoints1, descriptors1 ); + detector->detectAndCompute( img2, noArray(), keypoints2, descriptors2 ); + + //-- Step 2: Matching descriptor vectors with a FLANN based matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + Ptr matcher = DescriptorMatcher::create(DescriptorMatcher::FLANNBASED); + std::vector< std::vector > knn_matches; + matcher->knnMatch( descriptors1, descriptors2, knn_matches, 2 ); + + //-- Filter matches using the Lowe's ratio test + const float ratio_thresh = 0.7f; + 
std::vector good_matches; + for (size_t i = 0; i < knn_matches.size(); i++) + { + if (knn_matches[i].size() > 1 && knn_matches[i][0].distance / knn_matches[i][1].distance <= ratio_thresh) + { + good_matches.push_back(knn_matches[i][0]); + } + } + + //-- Draw matches + Mat img_matches; + drawMatches( img1, keypoints1, img2, keypoints2, good_matches, img_matches, Scalar::all(-1), + Scalar::all(-1), std::vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS ); + + //-- Show detected matches + imshow("Good Matches", img_matches ); + + waitKey(); + return 0; +} +#else +int main() +{ + std::cout << "This tutorial code needs the xfeatures2d contrib module to be run." << std::endl; + return 0; +} +#endif diff --git a/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp b/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp new file mode 100755 index 0000000000..68b1d2a720 --- /dev/null +++ b/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp @@ -0,0 +1,107 @@ +#include +#include "opencv2/core.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/calib3d.hpp" +#include "opencv2/highgui.hpp" +#include "opencv2/imgproc.hpp" +#include "opencv2/features2d.hpp" +#include "opencv2/xfeatures2d.hpp" + +using namespace cv; +using namespace cv::xfeatures2d; +using std::cout; +using std::endl; + +const char* keys = + "{ help h | | Print help message. }" + "{ input1 | ../data/box.png | Path to input image 1. }" + "{ input2 | ../data/box_in_scene.png | Path to input image 2. 
}"; + +int main( int argc, char* argv[] ) +{ + CommandLineParser parser( argc, argv, keys ); + Mat img_object = imread( parser.get("input1"), IMREAD_GRAYSCALE ); + Mat img_scene = imread( parser.get("input2"), IMREAD_GRAYSCALE ); + if ( img_object.empty() || img_scene.empty() ) + { + cout << "Could not open or find the image!\n" << endl; + parser.printMessage(); + return -1; + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + int minHessian = 400; + Ptr detector = SURF::create( minHessian ); + std::vector keypoints_object, keypoints_scene; + Mat descriptors_object, descriptors_scene; + detector->detectAndCompute( img_object, noArray(), keypoints_object, descriptors_object ); + detector->detectAndCompute( img_scene, noArray(), keypoints_scene, descriptors_scene ); + + //-- Step 2: Matching descriptor vectors with a FLANN based matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + Ptr matcher = DescriptorMatcher::create(DescriptorMatcher::FLANNBASED); + std::vector< std::vector > knn_matches; + matcher->knnMatch( descriptors_object, descriptors_scene, knn_matches, 2 ); + + //-- Filter matches using the Lowe's ratio test + const float ratio_thresh = 0.75f; + std::vector good_matches; + for (size_t i = 0; i < knn_matches.size(); i++) + { + if (knn_matches[i].size() > 1 && knn_matches[i][0].distance / knn_matches[i][1].distance <= ratio_thresh) + { + good_matches.push_back(knn_matches[i][0]); + } + } + + //-- Draw matches + Mat img_matches; + drawMatches( img_object, keypoints_object, img_scene, keypoints_scene, good_matches, img_matches, Scalar::all(-1), + Scalar::all(-1), std::vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS ); + + //-- Localize the object + std::vector obj; + std::vector scene; + + for( size_t i = 0; i < good_matches.size(); i++ ) + { + //-- Get the keypoints from the good matches + obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt ); + scene.push_back( keypoints_scene[ 
good_matches[i].trainIdx ].pt ); + } + + Mat H = findHomography( obj, scene, RANSAC ); + + //-- Get the corners from the image_1 ( the object to be "detected" ) + std::vector obj_corners(4); + obj_corners[0] = Point2f(0, 0); + obj_corners[1] = Point2f( (float)img_object.cols, 0 ); + obj_corners[2] = Point2f( (float)img_object.cols, (float)img_object.rows ); + obj_corners[3] = Point2f( 0, (float)img_object.rows ); + std::vector scene_corners(4); + + perspectiveTransform( obj_corners, scene_corners, H); + + //-- Draw lines between the corners (the mapped object in the scene - image_2 ) + line( img_matches, scene_corners[0] + Point2f((float)img_object.cols, 0), + scene_corners[1] + Point2f((float)img_object.cols, 0), Scalar(0, 255, 0), 4 ); + line( img_matches, scene_corners[1] + Point2f((float)img_object.cols, 0), + scene_corners[2] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 ); + line( img_matches, scene_corners[2] + Point2f((float)img_object.cols, 0), + scene_corners[3] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 ); + line( img_matches, scene_corners[3] + Point2f((float)img_object.cols, 0), + scene_corners[0] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 ); + + //-- Show detected matches + imshow("Good Matches & Object detection", img_matches ); + + waitKey(); + return 0; +} +#else +int main() +{ + std::cout << "This tutorial code needs the xfeatures2d contrib module to be run." 
<< std::endl; + return 0; +} +#endif diff --git a/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java b/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java new file mode 100644 index 0000000000..3be2e58056 --- /dev/null +++ b/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java @@ -0,0 +1,158 @@ +import java.awt.BorderLayout; +import java.awt.Container; +import java.awt.Image; +import java.util.Random; + +import javax.swing.BoxLayout; +import javax.swing.ImageIcon; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JSlider; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; + +import org.opencv.core.Core; +import org.opencv.core.CvType; +import org.opencv.core.Mat; +import org.opencv.core.MatOfPoint; +import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.core.Size; +import org.opencv.core.TermCriteria; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; + +class CornerSubPix { + private Mat src = new Mat(); + private Mat srcGray = new Mat(); + private JFrame frame; + private JLabel imgLabel; + private static final int MAX_CORNERS = 25; + private int maxCorners = 10; + private Random rng = new Random(12345); + + public CornerSubPix(String[] args) { + /// Load source image and convert it to gray + String filename = args.length > 0 ? args[0] : "../data/pic3.png"; + src = Imgcodecs.imread(filename); + if (src.empty()) { + System.err.println("Cannot read image: " + filename); + System.exit(0); + } + + Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY); + + // Create and set up the window. + frame = new JFrame("Shi-Tomasi corner detector demo"); + frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + // Set up the content pane. 
+ Image img = HighGui.toBufferedImage(src); + addComponentsToPane(frame.getContentPane(), img); + // Use the content pane's default BorderLayout. No need for + // setLayout(new BorderLayout()); + // Display the window. + frame.pack(); + frame.setVisible(true); + update(); + } + + private void addComponentsToPane(Container pane, Image img) { + if (!(pane.getLayout() instanceof BorderLayout)) { + pane.add(new JLabel("Container doesn't use BorderLayout!")); + return; + } + + JPanel sliderPanel = new JPanel(); + sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS)); + + sliderPanel.add(new JLabel("Max corners:")); + JSlider slider = new JSlider(0, MAX_CORNERS, maxCorners); + slider.setMajorTickSpacing(20); + slider.setMinorTickSpacing(10); + slider.setPaintTicks(true); + slider.setPaintLabels(true); + slider.addChangeListener(new ChangeListener() { + @Override + public void stateChanged(ChangeEvent e) { + JSlider source = (JSlider) e.getSource(); + maxCorners = source.getValue(); + update(); + } + }); + sliderPanel.add(slider); + pane.add(sliderPanel, BorderLayout.PAGE_START); + + imgLabel = new JLabel(new ImageIcon(img)); + pane.add(imgLabel, BorderLayout.CENTER); + } + + private void update() { + /// Parameters for Shi-Tomasi algorithm + maxCorners = Math.max(maxCorners, 1); + MatOfPoint corners = new MatOfPoint(); + double qualityLevel = 0.01; + double minDistance = 10; + int blockSize = 3, gradientSize = 3; + boolean useHarrisDetector = false; + double k = 0.04; + + /// Copy the source image + Mat copy = src.clone(); + + /// Apply corner detection + Imgproc.goodFeaturesToTrack(srcGray, corners, maxCorners, qualityLevel, minDistance, new Mat(), + blockSize, gradientSize, useHarrisDetector, k); + + /// Draw corners detected + System.out.println("** Number of corners detected: " + corners.rows()); + int[] cornersData = new int[(int) (corners.total() * corners.channels())]; + corners.get(0, 0, cornersData); + int radius = 4; + Mat matCorners = new 
Mat(corners.rows(), 2, CvType.CV_32F); + float[] matCornersData = new float[(int) (matCorners.total() * matCorners.channels())]; + matCorners.get(0, 0, matCornersData); + for (int i = 0; i < corners.rows(); i++) { + Imgproc.circle(copy, new Point(cornersData[i * 2], cornersData[i * 2 + 1]), radius, + new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Core.FILLED); + matCornersData[i * 2] = cornersData[i * 2]; + matCornersData[i * 2 + 1] = cornersData[i * 2 + 1]; + } + matCorners.put(0, 0, matCornersData); + + imgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(copy))); + frame.repaint(); + + /// Set the needed parameters to find the refined corners + Size winSize = new Size(5, 5); + Size zeroZone = new Size(-1, -1); + TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 40, 0.001); + + /// Calculate the refined corner locations + Imgproc.cornerSubPix(srcGray, matCorners, winSize, zeroZone, criteria); + + /// Write them down + matCorners.get(0, 0, matCornersData); + for (int i = 0; i < corners.rows(); i++) { + System.out.println( + " -- Refined Corner [" + i + "] (" + matCornersData[i * 2] + "," + matCornersData[i * 2 + 1] + ")"); + } + } +} + +public class CornerSubPixDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + // Schedule a job for the event dispatch thread: + // creating and showing this application's GUI. 
+ javax.swing.SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + new CornerSubPix(args); + } + }); + } +} diff --git a/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java b/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java new file mode 100644 index 0000000000..30450f8db1 --- /dev/null +++ b/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java @@ -0,0 +1,190 @@ +import java.awt.BorderLayout; +import java.awt.Container; +import java.awt.Image; +import java.util.Random; + +import javax.swing.BoxLayout; +import javax.swing.ImageIcon; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JSlider; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; + +import org.opencv.core.Core; +import org.opencv.core.Core.MinMaxLocResult; +import org.opencv.core.CvType; +import org.opencv.core.Mat; +import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; + +class CornerDetector { + private Mat src = new Mat(); + private Mat srcGray = new Mat(); + private Mat harrisDst = new Mat(); + private Mat shiTomasiDst = new Mat(); + private Mat harrisCopy = new Mat(); + private Mat shiTomasiCopy = new Mat(); + private Mat Mc = new Mat(); + private JFrame frame; + private JLabel harrisImgLabel; + private JLabel shiTomasiImgLabel; + private static final int MAX_QUALITY_LEVEL = 100; + private int qualityLevel = 50; + private double harrisMinVal; + private double harrisMaxVal; + private double shiTomasiMinVal; + private double shiTomasiMaxVal; + private Random rng = new Random(12345); + + public CornerDetector(String[] args) { + /// Load source image and convert it to gray + String filename = args.length > 0 ? 
args[0] : "../data/building.jpg"; + src = Imgcodecs.imread(filename); + if (src.empty()) { + System.err.println("Cannot read image: " + filename); + System.exit(0); + } + + Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY); + + // Create and set up the window. + frame = new JFrame("Creating your own corner detector demo"); + frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + // Set up the content pane. + Image img = HighGui.toBufferedImage(src); + addComponentsToPane(frame.getContentPane(), img); + // Use the content pane's default BorderLayout. No need for + // setLayout(new BorderLayout()); + // Display the window. + frame.pack(); + frame.setVisible(true); + + /// Set some parameters + int blockSize = 3, apertureSize = 3; + + /// My Harris matrix -- Using cornerEigenValsAndVecs + Imgproc.cornerEigenValsAndVecs(srcGray, harrisDst, blockSize, apertureSize); + + /* calculate Mc */ + Mc = Mat.zeros(srcGray.size(), CvType.CV_32F); + + float[] harrisData = new float[(int) (harrisDst.total() * harrisDst.channels())]; + harrisDst.get(0, 0, harrisData); + float[] McData = new float[(int) (Mc.total() * Mc.channels())]; + Mc.get(0, 0, McData); + + for( int i = 0; i < srcGray.rows(); i++ ) { + for( int j = 0; j < srcGray.cols(); j++ ) { + float lambda1 = harrisData[(i*srcGray.cols() + j) * 6]; + float lambda2 = harrisData[(i*srcGray.cols() + j) * 6 + 1]; + McData[i*srcGray.cols()+j] = (float) (lambda1*lambda2 - 0.04f*Math.pow( ( lambda1 + lambda2 ), 2 )); + } + } + Mc.put(0, 0, McData); + + MinMaxLocResult res = Core.minMaxLoc(Mc); + harrisMinVal = res.minVal; + harrisMaxVal = res.maxVal; + + /// My Shi-Tomasi -- Using cornerMinEigenVal + Imgproc.cornerMinEigenVal(srcGray, shiTomasiDst, blockSize, apertureSize); + res = Core.minMaxLoc(shiTomasiDst); + shiTomasiMinVal = res.minVal; + shiTomasiMaxVal = res.maxVal; + + update(); + } + + private void addComponentsToPane(Container pane, Image img) { + if (!(pane.getLayout() instanceof BorderLayout)) { + pane.add(new 
JLabel("Container doesn't use BorderLayout!")); + return; + } + + JPanel sliderPanel = new JPanel(); + sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS)); + + sliderPanel.add(new JLabel("Max corners:")); + JSlider slider = new JSlider(0, MAX_QUALITY_LEVEL, qualityLevel); + slider.setMajorTickSpacing(20); + slider.setMinorTickSpacing(10); + slider.setPaintTicks(true); + slider.setPaintLabels(true); + slider.addChangeListener(new ChangeListener() { + @Override + public void stateChanged(ChangeEvent e) { + JSlider source = (JSlider) e.getSource(); + qualityLevel = source.getValue(); + update(); + } + }); + sliderPanel.add(slider); + pane.add(sliderPanel, BorderLayout.PAGE_START); + + JPanel imgPanel = new JPanel(); + harrisImgLabel = new JLabel(new ImageIcon(img)); + shiTomasiImgLabel = new JLabel(new ImageIcon(img)); + imgPanel.add(harrisImgLabel); + imgPanel.add(shiTomasiImgLabel); + pane.add(imgPanel, BorderLayout.CENTER); + } + + private void update() { + int qualityLevelVal = Math.max(qualityLevel, 1); + + //Harris + harrisCopy = src.clone(); + + float[] McData = new float[(int) (Mc.total() * Mc.channels())]; + Mc.get(0, 0, McData); + for (int i = 0; i < srcGray.rows(); i++) { + for (int j = 0; j < srcGray.cols(); j++) { + if (McData[i * srcGray.cols() + j] > harrisMinVal + + (harrisMaxVal - harrisMinVal) * qualityLevelVal / MAX_QUALITY_LEVEL) { + Imgproc.circle(harrisCopy, new Point(j, i), 4, + new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Core.FILLED); + } + } + } + + //Shi-Tomasi + shiTomasiCopy = src.clone(); + + float[] shiTomasiData = new float[(int) (shiTomasiDst.total() * shiTomasiDst.channels())]; + shiTomasiDst.get(0, 0, shiTomasiData); + for (int i = 0; i < srcGray.rows(); i++) { + for (int j = 0; j < srcGray.cols(); j++) { + if (shiTomasiData[i * srcGray.cols() + j] > shiTomasiMinVal + + (shiTomasiMaxVal - shiTomasiMinVal) * qualityLevelVal / MAX_QUALITY_LEVEL) { + Imgproc.circle(shiTomasiCopy, new Point(j, i), 
4, + new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Core.FILLED); + } + } + } + + harrisImgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(harrisCopy))); + shiTomasiImgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(shiTomasiCopy))); + frame.repaint(); + } +} + +public class CornerDetectorDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + // Schedule a job for the event dispatch thread: + // creating and showing this application's GUI. + javax.swing.SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + new CornerDetector(args); + } + }); + } +} diff --git a/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java b/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java new file mode 100644 index 0000000000..b5ee732e84 --- /dev/null +++ b/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java @@ -0,0 +1,134 @@ +import java.awt.BorderLayout; +import java.awt.Container; +import java.awt.Image; +import java.util.Random; + +import javax.swing.BoxLayout; +import javax.swing.ImageIcon; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JSlider; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; + +import org.opencv.core.Core; +import org.opencv.core.Mat; +import org.opencv.core.MatOfPoint; +import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; + +class GoodFeaturesToTrack { + private Mat src = new Mat(); + private Mat srcGray = new Mat(); + private JFrame frame; + private JLabel imgLabel; + private static final int MAX_THRESHOLD = 100; + private int maxCorners = 23; + private Random rng = new Random(12345); + + public 
GoodFeaturesToTrack(String[] args) { + /// Load source image and convert it to gray + String filename = args.length > 0 ? args[0] : "../data/pic3.png"; + src = Imgcodecs.imread(filename); + if (src.empty()) { + System.err.println("Cannot read image: " + filename); + System.exit(0); + } + + Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY); + + // Create and set up the window. + frame = new JFrame("Shi-Tomasi corner detector demo"); + frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + // Set up the content pane. + Image img = HighGui.toBufferedImage(src); + addComponentsToPane(frame.getContentPane(), img); + // Use the content pane's default BorderLayout. No need for + // setLayout(new BorderLayout()); + // Display the window. + frame.pack(); + frame.setVisible(true); + update(); + } + + private void addComponentsToPane(Container pane, Image img) { + if (!(pane.getLayout() instanceof BorderLayout)) { + pane.add(new JLabel("Container doesn't use BorderLayout!")); + return; + } + + JPanel sliderPanel = new JPanel(); + sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS)); + + sliderPanel.add(new JLabel("Max corners:")); + JSlider slider = new JSlider(0, MAX_THRESHOLD, maxCorners); + slider.setMajorTickSpacing(20); + slider.setMinorTickSpacing(10); + slider.setPaintTicks(true); + slider.setPaintLabels(true); + slider.addChangeListener(new ChangeListener() { + @Override + public void stateChanged(ChangeEvent e) { + JSlider source = (JSlider) e.getSource(); + maxCorners = source.getValue(); + update(); + } + }); + sliderPanel.add(slider); + pane.add(sliderPanel, BorderLayout.PAGE_START); + + imgLabel = new JLabel(new ImageIcon(img)); + pane.add(imgLabel, BorderLayout.CENTER); + } + + private void update() { + /// Parameters for Shi-Tomasi algorithm + maxCorners = Math.max(maxCorners, 1); + MatOfPoint corners = new MatOfPoint(); + double qualityLevel = 0.01; + double minDistance = 10; + int blockSize = 3, gradientSize = 3; + boolean 
useHarrisDetector = false; + double k = 0.04; + + /// Copy the source image + Mat copy = src.clone(); + + /// Apply corner detection + Imgproc.goodFeaturesToTrack(srcGray, corners, maxCorners, qualityLevel, minDistance, new Mat(), + blockSize, gradientSize, useHarrisDetector, k); + + /// Draw corners detected + System.out.println("** Number of corners detected: " + corners.rows()); + int[] cornersData = new int[(int) (corners.total() * corners.channels())]; + corners.get(0, 0, cornersData); + int radius = 4; + for (int i = 0; i < corners.rows(); i++) { + Imgproc.circle(copy, new Point(cornersData[i * 2], cornersData[i * 2 + 1]), radius, + new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Core.FILLED); + } + + imgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(copy))); + frame.repaint(); + } +} + +public class GoodFeaturesToTrackDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + // Schedule a job for the event dispatch thread: + // creating and showing this application's GUI. 
+ javax.swing.SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + new GoodFeaturesToTrack(args); + } + }); + } +} diff --git a/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java b/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java new file mode 100644 index 0000000000..b3c759d28f --- /dev/null +++ b/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java @@ -0,0 +1,142 @@ +import java.awt.BorderLayout; +import java.awt.Container; +import java.awt.Image; + +import javax.swing.BoxLayout; +import javax.swing.ImageIcon; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JSlider; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; + +import org.opencv.core.Core; +import org.opencv.core.CvType; +import org.opencv.core.Mat; +import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; + +class CornerHarris { + private Mat srcGray = new Mat(); + private Mat dst = new Mat(); + private Mat dstNorm = new Mat(); + private Mat dstNormScaled = new Mat(); + private JFrame frame; + private JLabel imgLabel; + private JLabel cornerLabel; + private static final int MAX_THRESHOLD = 255; + private int threshold = 200; + + public CornerHarris(String[] args) { + /// Load source image and convert it to gray + String filename = args.length > 0 ? args[0] : "../data/building.jpg"; + Mat src = Imgcodecs.imread(filename); + if (src.empty()) { + System.err.println("Cannot read image: " + filename); + System.exit(0); + } + + Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY); + + // Create and set up the window. + frame = new JFrame("Harris corner detector demo"); + frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + // Set up the content pane. 
+ Image img = HighGui.toBufferedImage(src); + addComponentsToPane(frame.getContentPane(), img); + // Use the content pane's default BorderLayout. No need for + // setLayout(new BorderLayout()); + // Display the window. + frame.pack(); + frame.setVisible(true); + update(); + } + + private void addComponentsToPane(Container pane, Image img) { + if (!(pane.getLayout() instanceof BorderLayout)) { + pane.add(new JLabel("Container doesn't use BorderLayout!")); + return; + } + + JPanel sliderPanel = new JPanel(); + sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS)); + + sliderPanel.add(new JLabel("Threshold: ")); + JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold); + slider.setMajorTickSpacing(20); + slider.setMinorTickSpacing(10); + slider.setPaintTicks(true); + slider.setPaintLabels(true); + slider.addChangeListener(new ChangeListener() { + @Override + public void stateChanged(ChangeEvent e) { + JSlider source = (JSlider) e.getSource(); + threshold = source.getValue(); + update(); + } + }); + sliderPanel.add(slider); + pane.add(sliderPanel, BorderLayout.PAGE_START); + + JPanel imgPanel = new JPanel(); + imgLabel = new JLabel(new ImageIcon(img)); + imgPanel.add(imgLabel); + + Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U); + cornerLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg))); + imgPanel.add(cornerLabel); + + pane.add(imgPanel, BorderLayout.CENTER); + } + + private void update() { + dst = Mat.zeros(srcGray.size(), CvType.CV_32F); + + /// Detector parameters + int blockSize = 2; + int apertureSize = 3; + double k = 0.04; + + /// Detecting corners + Imgproc.cornerHarris(srcGray, dst, blockSize, apertureSize, k); + + /// Normalizing + Core.normalize(dst, dstNorm, 0, 255, Core.NORM_MINMAX); + Core.convertScaleAbs(dstNorm, dstNormScaled); + + /// Drawing a circle around corners + float[] dstNormData = new float[(int) (dstNorm.total() * dstNorm.channels())]; + dstNorm.get(0, 0, dstNormData); + + for (int i = 0; i < 
dstNorm.rows(); i++) { + for (int j = 0; j < dstNorm.cols(); j++) { + if ((int) dstNormData[i * dstNorm.cols() + j] > threshold) { + Imgproc.circle(dstNormScaled, new Point(j, i), 5, new Scalar(0), 2, 8, 0); + } + } + } + + cornerLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(dstNormScaled))); + frame.repaint(); + } +} + +public class CornerHarrisDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + // Schedule a job for the event dispatch thread: + // creating and showing this application's GUI. + javax.swing.SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + new CornerHarris(args); + } + }); + } +} diff --git a/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java b/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java new file mode 100644 index 0000000000..ac64417d93 --- /dev/null +++ b/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java @@ -0,0 +1,56 @@ +import org.opencv.core.Core; +import org.opencv.core.Mat; +import org.opencv.core.MatOfDMatch; +import org.opencv.core.MatOfKeyPoint; +import org.opencv.features2d.DescriptorMatcher; +import org.opencv.features2d.Features2d; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.xfeatures2d.SURF; + +class SURFMatching { + public void run(String[] args) { + String filename1 = args.length > 1 ? args[0] : "../data/box.png"; + String filename2 = args.length > 1 ? 
args[1] : "../data/box_in_scene.png"; + Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE); + Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE); + if (img1.empty() || img2.empty()) { + System.err.println("Cannot read images!"); + System.exit(0); + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + double hessianThreshold = 400; + int nOctaves = 4, nOctaveLayers = 3; + boolean extended = false, upright = false; + SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright); + MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint(); + Mat descriptors1 = new Mat(), descriptors2 = new Mat(); + detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1); + detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2); + + //-- Step 2: Matching descriptor vectors with a brute force matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); + MatOfDMatch matches = new MatOfDMatch(); + matcher.match(descriptors1, descriptors2, matches); + + //-- Draw matches + Mat imgMatches = new Mat(); + Features2d.drawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches); + + HighGui.imshow("Matches", imgMatches); + HighGui.waitKey(0); + + System.exit(0); + } +} + +public class SURFMatchingDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + new SURFMatching().run(args); + } +} diff --git a/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java b/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java new file mode 100644 index 0000000000..c78a0c66bd --- /dev/null +++ b/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java @@ -0,0 +1,44 @@ +import org.opencv.core.Core; +import 
org.opencv.core.Mat; +import org.opencv.core.MatOfKeyPoint; +import org.opencv.features2d.Features2d; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.xfeatures2d.SURF; + +class SURFDetection { + public void run(String[] args) { + String filename = args.length > 0 ? args[0] : "../data/box.png"; + Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE); + if (src.empty()) { + System.err.println("Cannot read image: " + filename); + System.exit(0); + } + + //-- Step 1: Detect the keypoints using SURF Detector + double hessianThreshold = 400; + int nOctaves = 4, nOctaveLayers = 3; + boolean extended = false, upright = false; + SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright); + MatOfKeyPoint keypoints = new MatOfKeyPoint(); + detector.detect(src, keypoints); + + //-- Draw keypoints + Features2d.drawKeypoints(src, keypoints, src); + + //-- Show detected (drawn) keypoints + HighGui.imshow("SURF Keypoints", src); + HighGui.waitKey(0); + + System.exit(0); + } +} + +public class SURFDetectionDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + new SURFDetection().run(args); + } +} diff --git a/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java b/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java new file mode 100644 index 0000000000..e02af9cadb --- /dev/null +++ b/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java @@ -0,0 +1,78 @@ +import java.util.ArrayList; +import java.util.List; + +import org.opencv.core.Core; +import org.opencv.core.DMatch; +import org.opencv.core.Mat; +import org.opencv.core.MatOfByte; +import org.opencv.core.MatOfDMatch; +import org.opencv.core.MatOfKeyPoint; +import org.opencv.core.Scalar; +import org.opencv.features2d.DescriptorMatcher; +import 
org.opencv.features2d.Features2d; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.xfeatures2d.SURF; + +class SURFFLANNMatching { + public void run(String[] args) { + String filename1 = args.length > 1 ? args[0] : "../data/box.png"; + String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png"; + Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE); + Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE); + if (img1.empty() || img2.empty()) { + System.err.println("Cannot read images!"); + System.exit(0); + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + double hessianThreshold = 400; + int nOctaves = 4, nOctaveLayers = 3; + boolean extended = false, upright = false; + SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright); + MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint(); + Mat descriptors1 = new Mat(), descriptors2 = new Mat(); + detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1); + detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2); + + //-- Step 2: Matching descriptor vectors with a FLANN based matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); + List knnMatches = new ArrayList<>(); + matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2); + + //-- Filter matches using the Lowe's ratio test + float ratio_thresh = 0.7f; + List listOfGoodMatches = new ArrayList<>(); + for (int i = 0; i < knnMatches.size(); i++) { + if (knnMatches.get(i).rows() > 1) { + DMatch[] matches = knnMatches.get(i).toArray(); + if (matches[0].distance / matches[1].distance <= ratio_thresh) { + listOfGoodMatches.add(matches[0]); + } + } + } + MatOfDMatch goodMatches = new MatOfDMatch(); + goodMatches.fromList(listOfGoodMatches); + + //-- Draw matches + Mat 
imgMatches = new Mat(); + Features2d.drawMatches(img1, keypoints1, img2, keypoints2, goodMatches, imgMatches, Scalar.all(-1), + Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS); + + //-- Show detected matches + HighGui.imshow("Good Matches", imgMatches); + HighGui.waitKey(0); + + System.exit(0); + } +} + +public class SURFFLANNMatchingDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + new SURFFLANNMatching().run(args); + } +} diff --git a/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java b/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java new file mode 100644 index 0000000000..1a5cbe7f30 --- /dev/null +++ b/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java @@ -0,0 +1,130 @@ +import java.util.ArrayList; +import java.util.List; + +import org.opencv.calib3d.Calib3d; +import org.opencv.core.Core; +import org.opencv.core.CvType; +import org.opencv.core.DMatch; +import org.opencv.core.KeyPoint; +import org.opencv.core.Mat; +import org.opencv.core.MatOfByte; +import org.opencv.core.MatOfDMatch; +import org.opencv.core.MatOfKeyPoint; +import org.opencv.core.MatOfPoint2f; +import org.opencv.core.Point; +import org.opencv.core.Scalar; +import org.opencv.features2d.DescriptorMatcher; +import org.opencv.features2d.Features2d; +import org.opencv.highgui.HighGui; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; +import org.opencv.xfeatures2d.SURF; + +class SURFFLANNMatchingHomography { + public void run(String[] args) { + String filenameObject = args.length > 1 ? args[0] : "../data/box.png"; + String filenameScene = args.length > 1 ? 
args[1] : "../data/box_in_scene.png"; + Mat imgObject = Imgcodecs.imread(filenameObject, Imgcodecs.IMREAD_GRAYSCALE); + Mat imgScene = Imgcodecs.imread(filenameScene, Imgcodecs.IMREAD_GRAYSCALE); + if (imgObject.empty() || imgScene.empty()) { + System.err.println("Cannot read images!"); + System.exit(0); + } + + //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors + double hessianThreshold = 400; + int nOctaves = 4, nOctaveLayers = 3; + boolean extended = false, upright = false; + SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright); + MatOfKeyPoint keypointsObject = new MatOfKeyPoint(), keypointsScene = new MatOfKeyPoint(); + Mat descriptorsObject = new Mat(), descriptorsScene = new Mat(); + detector.detectAndCompute(imgObject, new Mat(), keypointsObject, descriptorsObject); + detector.detectAndCompute(imgScene, new Mat(), keypointsScene, descriptorsScene); + + //-- Step 2: Matching descriptor vectors with a FLANN based matcher + // Since SURF is a floating-point descriptor NORM_L2 is used + DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); + List knnMatches = new ArrayList<>(); + matcher.knnMatch(descriptorsObject, descriptorsScene, knnMatches, 2); + + //-- Filter matches using the Lowe's ratio test + float ratio_thresh = 0.75f; + List listOfGoodMatches = new ArrayList<>(); + for (int i = 0; i < knnMatches.size(); i++) { + if (knnMatches.get(i).rows() > 1) { + DMatch[] matches = knnMatches.get(i).toArray(); + if (matches[0].distance / matches[1].distance <= ratio_thresh) { + listOfGoodMatches.add(matches[0]); + } + } + } + MatOfDMatch goodMatches = new MatOfDMatch(); + goodMatches.fromList(listOfGoodMatches); + + //-- Draw matches + Mat imgMatches = new Mat(); + Features2d.drawMatches(imgObject, keypointsObject, imgScene, keypointsScene, goodMatches, imgMatches, Scalar.all(-1), + Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS); + + //-- Localize 
the object + List obj = new ArrayList<>(); + List scene = new ArrayList<>(); + + List listOfKeypointsObject = keypointsObject.toList(); + List listOfKeypointsScene = keypointsScene.toList(); + for (int i = 0; i < listOfGoodMatches.size(); i++) { + //-- Get the keypoints from the good matches + obj.add(listOfKeypointsObject.get(listOfGoodMatches.get(i).queryIdx).pt); + scene.add(listOfKeypointsScene.get(listOfGoodMatches.get(i).trainIdx).pt); + } + + MatOfPoint2f objMat = new MatOfPoint2f(), sceneMat = new MatOfPoint2f(); + objMat.fromList(obj); + sceneMat.fromList(scene); + double ransacReprojThreshold = 3.0; + Mat H = Calib3d.findHomography( objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold ); + + //-- Get the corners from the image_1 ( the object to be "detected" ) + Mat objCorners = new Mat(4, 1, CvType.CV_32FC2), sceneCorners = new Mat(); + float[] objCornersData = new float[(int) (objCorners.total() * objCorners.channels())]; + objCorners.get(0, 0, objCornersData); + objCornersData[0] = 0; + objCornersData[1] = 0; + objCornersData[2] = imgObject.cols(); + objCornersData[3] = 0; + objCornersData[4] = imgObject.cols(); + objCornersData[5] = imgObject.rows(); + objCornersData[6] = 0; + objCornersData[7] = imgObject.rows(); + objCorners.put(0, 0, objCornersData); + + Core.perspectiveTransform(objCorners, sceneCorners, H); + float[] sceneCornersData = new float[(int) (sceneCorners.total() * sceneCorners.channels())]; + sceneCorners.get(0, 0, sceneCornersData); + + //-- Draw lines between the corners (the mapped object in the scene - image_2 ) + Imgproc.line(imgMatches, new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), + new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4); + Imgproc.line(imgMatches, new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), + new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4); + 
Imgproc.line(imgMatches, new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), + new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4); + Imgproc.line(imgMatches, new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), + new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4); + + //-- Show detected matches + HighGui.imshow("Good Matches & Object detection", imgMatches); + HighGui.waitKey(0); + + System.exit(0); + } +} + +public class SURFFLANNMatchingHomographyDemo { + public static void main(String[] args) { + // Load the native OpenCV library + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + + new SURFFLANNMatchingHomography().run(args); + } +} diff --git a/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py b/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py new file mode 100644 index 0000000000..72ce96b1a6 --- /dev/null +++ b/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py @@ -0,0 +1,70 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse +import random as rng + +source_window = 'Image' +maxTrackbar = 25 +rng.seed(12345) + +def goodFeaturesToTrack_Demo(val): + maxCorners = max(val, 1) + + # Parameters for Shi-Tomasi algorithm + qualityLevel = 0.01 + minDistance = 10 + blockSize = 3 + gradientSize = 3 + useHarrisDetector = False + k = 0.04 + + # Copy the source image + copy = np.copy(src) + + # Apply corner detection + corners = cv.goodFeaturesToTrack(src_gray, maxCorners, qualityLevel, minDistance, None, \ + blockSize=blockSize, gradientSize=gradientSize, useHarrisDetector=useHarrisDetector, k=k) + + # Draw corners detected + print('** Number of corners detected:', corners.shape[0]) + radius = 4 + for i in range(corners.shape[0]): + cv.circle(copy, (corners[i,0,0], corners[i,0,1]), radius, 
(rng.randint(0,256), rng.randint(0,256), rng.randint(0,256)), cv.FILLED) + + # Show what you got + cv.namedWindow(source_window) + cv.imshow(source_window, copy) + + # Set the needed parameters to find the refined corners + winSize = (5, 5) + zeroZone = (-1, -1) + criteria = (cv.TERM_CRITERIA_EPS + cv.TermCriteria_COUNT, 40, 0.001) + + # Calculate the refined corner locations + corners = cv.cornerSubPix(src_gray, corners, winSize, zeroZone, criteria) + + # Write them down + for i in range(corners.shape[0]): + print(" -- Refined Corner [", i, "] (", corners[i,0,0], ",", corners[i,0,1], ")") + +# Load source image and convert it to gray +parser = argparse.ArgumentParser(description='Code for Shi-Tomasi corner detector tutorial.') +parser.add_argument('--input', help='Path to input image.', default='../data/pic3.png') +args = parser.parse_args() + +src = cv.imread(args.input) +if src is None: + print('Could not open or find the image:', args.input) + exit(0) + +src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY) + +# Create a window and a trackbar +cv.namedWindow(source_window) +maxCorners = 10 # initial threshold +cv.createTrackbar('Threshold: ', source_window, maxCorners, maxTrackbar, goodFeaturesToTrack_Demo) +cv.imshow(source_window, src) +goodFeaturesToTrack_Demo(maxCorners) + +cv.waitKey() diff --git a/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py b/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py new file mode 100644 index 0000000000..d135367fc2 --- /dev/null +++ b/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py @@ -0,0 +1,80 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse +import random as rng + +myHarris_window = 'My Harris corner detector' +myShiTomasi_window = 'My Shi Tomasi corner detector' +myHarris_qualityLevel = 50 +myShiTomasi_qualityLevel = 50 +max_qualityLevel = 100 
+rng.seed(12345) + +def myHarris_function(val): + myHarris_copy = np.copy(src) + myHarris_qualityLevel = max(val, 1) + + for i in range(src_gray.shape[0]): + for j in range(src_gray.shape[1]): + if Mc[i,j] > myHarris_minVal + ( myHarris_maxVal - myHarris_minVal )*myHarris_qualityLevel/max_qualityLevel: + cv.circle(myHarris_copy, (j,i), 4, (rng.randint(0,256), rng.randint(0,256), rng.randint(0,256)), cv.FILLED) + + cv.imshow(myHarris_window, myHarris_copy) + +def myShiTomasi_function(val): + myShiTomasi_copy = np.copy(src) + myShiTomasi_qualityLevel = max(val, 1) + + for i in range(src_gray.shape[0]): + for j in range(src_gray.shape[1]): + if myShiTomasi_dst[i,j] > myShiTomasi_minVal + ( myShiTomasi_maxVal - myShiTomasi_minVal )*myShiTomasi_qualityLevel/max_qualityLevel: + cv.circle(myShiTomasi_copy, (j,i), 4, (rng.randint(0,256), rng.randint(0,256), rng.randint(0,256)), cv.FILLED) + + cv.imshow(myShiTomasi_window, myShiTomasi_copy) + +# Load source image and convert it to gray +parser = argparse.ArgumentParser(description='Code for Creating your own corner detector tutorial.') +parser.add_argument('--input', help='Path to input image.', default='../data/building.jpg') +args = parser.parse_args() + +src = cv.imread(args.input) +if src is None: + print('Could not open or find the image:', args.input) + exit(0) + +src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY) + +# Set some parameters +blockSize = 3 +apertureSize = 3 + +# My Harris matrix -- Using cornerEigenValsAndVecs +myHarris_dst = cv.cornerEigenValsAndVecs(src_gray, blockSize, apertureSize) + +# calculate Mc +Mc = np.empty(src_gray.shape, dtype=np.float32) +for i in range(src_gray.shape[0]): + for j in range(src_gray.shape[1]): + lambda_1 = myHarris_dst[i,j,0] + lambda_2 = myHarris_dst[i,j,1] + Mc[i,j] = lambda_1*lambda_2 - 0.04*pow( ( lambda_1 + lambda_2 ), 2 ) + +myHarris_minVal, myHarris_maxVal, _, _ = cv.minMaxLoc(Mc) + +# Create Window and Trackbar +cv.namedWindow(myHarris_window) +cv.createTrackbar('Quality 
Level:', myHarris_window, myHarris_qualityLevel, max_qualityLevel, myHarris_function) +myHarris_function(myHarris_qualityLevel) + +# My Shi-Tomasi -- Using cornerMinEigenVal +myShiTomasi_dst = cv.cornerMinEigenVal(src_gray, blockSize, apertureSize) + +myShiTomasi_minVal, myShiTomasi_maxVal, _, _ = cv.minMaxLoc(myShiTomasi_dst) + +# Create Window and Trackbar +cv.namedWindow(myShiTomasi_window) +cv.createTrackbar('Quality Level:', myShiTomasi_window, myShiTomasi_qualityLevel, max_qualityLevel, myShiTomasi_function) +myShiTomasi_function(myShiTomasi_qualityLevel) + +cv.waitKey() diff --git a/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py b/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py new file mode 100644 index 0000000000..57e767ccee --- /dev/null +++ b/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py @@ -0,0 +1,58 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse +import random as rng + +source_window = 'Image' +maxTrackbar = 100 +rng.seed(12345) + +def goodFeaturesToTrack_Demo(val): + maxCorners = max(val, 1) + + # Parameters for Shi-Tomasi algorithm + qualityLevel = 0.01 + minDistance = 10 + blockSize = 3 + gradientSize = 3 + useHarrisDetector = False + k = 0.04 + + # Copy the source image + copy = np.copy(src) + + # Apply corner detection + corners = cv.goodFeaturesToTrack(src_gray, maxCorners, qualityLevel, minDistance, None, \ + blockSize=blockSize, gradientSize=gradientSize, useHarrisDetector=useHarrisDetector, k=k) + + # Draw corners detected + print('** Number of corners detected:', corners.shape[0]) + radius = 4 + for i in range(corners.shape[0]): + cv.circle(copy, (corners[i,0,0], corners[i,0,1]), radius, (rng.randint(0,256), rng.randint(0,256), rng.randint(0,256)), cv.FILLED) + + # Show what you got + cv.namedWindow(source_window) + cv.imshow(source_window, copy) + +# 
Load source image and convert it to gray +parser = argparse.ArgumentParser(description='Code for Shi-Tomasi corner detector tutorial.') +parser.add_argument('--input', help='Path to input image.', default='../data/pic3.png') +args = parser.parse_args() + +src = cv.imread(args.input) +if src is None: + print('Could not open or find the image:', args.input) + exit(0) + +src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY) + +# Create a window and a trackbar +cv.namedWindow(source_window) +maxCorners = 23 # initial threshold +cv.createTrackbar('Threshold: ', source_window, maxCorners, maxTrackbar, goodFeaturesToTrack_Demo) +cv.imshow(source_window, src) +goodFeaturesToTrack_Demo(maxCorners) + +cv.waitKey() diff --git a/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py b/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py new file mode 100644 index 0000000000..cee7679adf --- /dev/null +++ b/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py @@ -0,0 +1,55 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse + +source_window = 'Source image' +corners_window = 'Corners detected' +max_thresh = 255 + +def cornerHarris_demo(val): + thresh = val + + # Detector parameters + blockSize = 2 + apertureSize = 3 + k = 0.04 + + # Detecting corners + dst = cv.cornerHarris(src_gray, blockSize, apertureSize, k) + + # Normalizing + dst_norm = np.empty(dst.shape, dtype=np.float32) + cv.normalize(dst, dst_norm, alpha=0, beta=255, norm_type=cv.NORM_MINMAX) + dst_norm_scaled = cv.convertScaleAbs(dst_norm) + + # Drawing a circle around corners + for i in range(dst_norm.shape[0]): + for j in range(dst_norm.shape[1]): + if int(dst_norm[i,j]) > thresh: + cv.circle(dst_norm_scaled, (j,i), 5, (0), 2) + + # Showing the result + cv.namedWindow(corners_window) + cv.imshow(corners_window, dst_norm_scaled) + +# Load source image and convert it to gray +parser = 
argparse.ArgumentParser(description='Code for Harris corner detector tutorial.') +parser.add_argument('--input', help='Path to input image.', default='../data/building.jpg') +args = parser.parse_args() + +src = cv.imread(args.input) +if src is None: + print('Could not open or find the image:', args.input) + exit(0) + +src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY) + +# Create a window and a trackbar +cv.namedWindow(source_window) +thresh = 200 # initial threshold +cv.createTrackbar('Threshold: ', source_window, thresh, max_thresh, cornerHarris_demo) +cv.imshow(source_window, src) +cornerHarris_demo(thresh) + +cv.waitKey() diff --git a/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py b/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py new file mode 100644 index 0000000000..f50e48d858 --- /dev/null +++ b/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py @@ -0,0 +1,35 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse + +parser = argparse.ArgumentParser(description='Code for Feature Detection tutorial.') +parser.add_argument('--input1', help='Path to input image 1.', default='../data/box.png') +parser.add_argument('--input2', help='Path to input image 2.', default='../data/box_in_scene.png') +args = parser.parse_args() + +img1 = cv.imread(args.input1, cv.IMREAD_GRAYSCALE) +img2 = cv.imread(args.input2, cv.IMREAD_GRAYSCALE) +if img1 is None or img2 is None: + print('Could not open or find the images!') + exit(0) + +#-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors +minHessian = 400 +detector = cv.xfeatures2d_SURF.create(hessianThreshold=minHessian) +keypoints1, descriptors1 = detector.detectAndCompute(img1, None) +keypoints2, descriptors2 = detector.detectAndCompute(img2, None) + +#-- Step 2: Matching descriptor vectors with a brute force matcher +# Since SURF is a floating-point descriptor NORM_L2 is 
used +matcher = cv.DescriptorMatcher_create(cv.DescriptorMatcher_BRUTEFORCE) +matches = matcher.match(descriptors1, descriptors2) + +#-- Draw matches +img_matches = np.empty((max(img1.shape[0], img2.shape[0]), img1.shape[1]+img2.shape[1], 3), dtype=np.uint8) +cv.drawMatches(img1, keypoints1, img2, keypoints2, matches, img_matches) + +#-- Show detected matches +cv.imshow('Matches', img_matches) + +cv.waitKey() diff --git a/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py b/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py new file mode 100644 index 0000000000..717d9f13c0 --- /dev/null +++ b/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py @@ -0,0 +1,27 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse + +parser = argparse.ArgumentParser(description='Code for Feature Detection tutorial.') +parser.add_argument('--input', help='Path to input image.', default='../data/box.png') +args = parser.parse_args() + +src = cv.imread(args.input, cv.IMREAD_GRAYSCALE) +if src is None: + print('Could not open or find the image:', args.input) + exit(0) + +#-- Step 1: Detect the keypoints using SURF Detector +minHessian = 400 +detector = cv.xfeatures2d_SURF.create(hessianThreshold=minHessian) +keypoints = detector.detect(src) + +#-- Draw keypoints +img_keypoints = np.empty((src.shape[0], src.shape[1], 3), dtype=np.uint8) +cv.drawKeypoints(src, keypoints, img_keypoints) + +#-- Show detected (drawn) keypoints +cv.imshow('SURF Keypoints', img_keypoints) + +cv.waitKey() diff --git a/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py b/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py new file mode 100644 index 0000000000..d22f9a8a6f --- /dev/null +++ b/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py @@ -0,0 +1,43 @@ +from 
__future__ import print_function +import cv2 as cv +import numpy as np +import argparse + +parser = argparse.ArgumentParser(description='Code for Feature Matching with FLANN tutorial.') +parser.add_argument('--input1', help='Path to input image 1.', default='../data/box.png') +parser.add_argument('--input2', help='Path to input image 2.', default='../data/box_in_scene.png') +args = parser.parse_args() + +img1 = cv.imread(args.input1, cv.IMREAD_GRAYSCALE) +img2 = cv.imread(args.input2, cv.IMREAD_GRAYSCALE) +if img1 is None or img2 is None: + print('Could not open or find the images!') + exit(0) + +#-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors +minHessian = 400 +detector = cv.xfeatures2d_SURF.create(hessianThreshold=minHessian) +keypoints1, descriptors1 = detector.detectAndCompute(img1, None) +keypoints2, descriptors2 = detector.detectAndCompute(img2, None) + +#-- Step 2: Matching descriptor vectors with a FLANN based matcher +# Since SURF is a floating-point descriptor NORM_L2 is used +matcher = cv.DescriptorMatcher_create(cv.DescriptorMatcher_FLANNBASED) +knn_matches = matcher.knnMatch(descriptors1, descriptors2, 2) + +#-- Filter matches using the Lowe's ratio test +ratio_thresh = 0.7 +good_matches = [] +for matches in knn_matches: + if len(matches) > 1: + if matches[0].distance / matches[1].distance <= ratio_thresh: + good_matches.append(matches[0]) + +#-- Draw matches +img_matches = np.empty((max(img1.shape[0], img2.shape[0]), img1.shape[1]+img2.shape[1], 3), dtype=np.uint8) +cv.drawMatches(img1, keypoints1, img2, keypoints2, good_matches, img_matches, flags=cv.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS) + +#-- Show detected matches +cv.imshow('Good Matches', img_matches) + +cv.waitKey() diff --git a/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py b/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py new file mode 100644 index 
0000000000..8820addce2 --- /dev/null +++ b/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py @@ -0,0 +1,78 @@ +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse + +parser = argparse.ArgumentParser(description='Code for Feature Matching with FLANN tutorial.') +parser.add_argument('--input1', help='Path to input image 1.', default='../data/box.png') +parser.add_argument('--input2', help='Path to input image 2.', default='../data/box_in_scene.png') +args = parser.parse_args() + +img_object = cv.imread(args.input1, cv.IMREAD_GRAYSCALE) +img_scene = cv.imread(args.input2, cv.IMREAD_GRAYSCALE) +if img_object is None or img_scene is None: + print('Could not open or find the images!') + exit(0) + +#-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors +minHessian = 400 +detector = cv.xfeatures2d_SURF.create(hessianThreshold=minHessian) +keypoints_obj, descriptors_obj = detector.detectAndCompute(img_object, None) +keypoints_scene, descriptors_scene = detector.detectAndCompute(img_scene, None) + +#-- Step 2: Matching descriptor vectors with a FLANN based matcher +# Since SURF is a floating-point descriptor NORM_L2 is used +matcher = cv.DescriptorMatcher_create(cv.DescriptorMatcher_FLANNBASED) +knn_matches = matcher.knnMatch(descriptors_obj, descriptors_scene, 2) + +#-- Filter matches using the Lowe's ratio test +ratio_thresh = 0.75 +good_matches = [] +for matches in knn_matches: + if len(matches) > 1: + if matches[0].distance / matches[1].distance <= ratio_thresh: + good_matches.append(matches[0]) + +#-- Draw matches +img_matches = np.empty((max(img_object.shape[0], img_scene.shape[0]), img_object.shape[1]+img_scene.shape[1], 3), dtype=np.uint8) +cv.drawMatches(img_object, keypoints_obj, img_scene, keypoints_scene, good_matches, img_matches, flags=cv.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS) + +#-- Localize the object +obj = np.empty((len(good_matches),2), 
dtype=np.float32) +scene = np.empty((len(good_matches),2), dtype=np.float32) +for i in range(len(good_matches)): + #-- Get the keypoints from the good matches + obj[i,0] = keypoints_obj[good_matches[i].queryIdx].pt[0] + obj[i,1] = keypoints_obj[good_matches[i].queryIdx].pt[1] + scene[i,0] = keypoints_scene[good_matches[i].trainIdx].pt[0] + scene[i,1] = keypoints_scene[good_matches[i].trainIdx].pt[1] + +H, _ = cv.findHomography(obj, scene, cv.RANSAC) + +#-- Get the corners from the image_1 ( the object to be "detected" ) +obj_corners = np.empty((4,1,2), dtype=np.float32) +obj_corners[0,0,0] = 0 +obj_corners[0,0,1] = 0 +obj_corners[1,0,0] = img_object.shape[1] +obj_corners[1,0,1] = 0 +obj_corners[2,0,0] = img_object.shape[1] +obj_corners[2,0,1] = img_object.shape[0] +obj_corners[3,0,0] = 0 +obj_corners[3,0,1] = img_object.shape[0] + +scene_corners = cv.perspectiveTransform(obj_corners, H) + +#-- Draw lines between the corners (the mapped object in the scene - image_2 ) +cv.line(img_matches, (int(scene_corners[0,0,0] + img_object.shape[1]), int(scene_corners[0,0,1])),\ + (int(scene_corners[1,0,0] + img_object.shape[1]), int(scene_corners[1,0,1])), (0,255,0), 4) +cv.line(img_matches, (int(scene_corners[1,0,0] + img_object.shape[1]), int(scene_corners[1,0,1])),\ + (int(scene_corners[2,0,0] + img_object.shape[1]), int(scene_corners[2,0,1])), (0,255,0), 4) +cv.line(img_matches, (int(scene_corners[2,0,0] + img_object.shape[1]), int(scene_corners[2,0,1])),\ + (int(scene_corners[3,0,0] + img_object.shape[1]), int(scene_corners[3,0,1])), (0,255,0), 4) +cv.line(img_matches, (int(scene_corners[3,0,0] + img_object.shape[1]), int(scene_corners[3,0,1])),\ + (int(scene_corners[0,0,0] + img_object.shape[1]), int(scene_corners[0,0,1])), (0,255,0), 4) + +#-- Show detected matches +cv.imshow('Good Matches & Object detection', img_matches) + +cv.waitKey() From 085be6a4453b30c6961f517de68be9a8857ce2e9 Mon Sep 17 00:00:00 2001 From: Dmitry Kurtaev Date: Tue, 29 May 2018 12:15:47 +0300 
Subject: [PATCH 02/40] Fix dilated convolution from Keras --- modules/dnn/src/tensorflow/tf_importer.cpp | 9 +++++++-- modules/dnn/test/test_tf_importer.cpp | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp index 195b516813..bca150e3b5 100644 --- a/modules/dnn/src/tensorflow/tf_importer.cpp +++ b/modules/dnn/src/tensorflow/tf_importer.cpp @@ -644,8 +644,9 @@ void TFImporter::populateNet(Net dstNet) CV_Assert(layer.input_size() == 3); DictValue dilation = parseDims(getConstBlob(layer, value_id, 1)); - CV_Assert(dilation.size() == 2 && dilation.get(0) == dilation.get(1)); - layerParams.set("dilation", dilation.get(0)); + CV_Assert(dilation.size() == 2); + layerParams.set("dilation_h", dilation.get(0)); + layerParams.set("dilation_w", dilation.get(1)); Mat paddings; parseTensor(getConstBlob(layer, value_id, 2), paddings); @@ -655,6 +656,10 @@ void TFImporter::populateNet(Net dstNet) layerParams.set("pad_w", paddings.at(2)); StrIntVector next_layers = getNextLayers(net, name, "Conv2D"); + if (next_layers.empty()) + { + next_layers = getNextLayers(net, name, "DepthwiseConv2dNative"); + } CV_Assert(next_layers.size() == 1); layer = net.node(next_layers[0].second); layers_to_ignore.insert(next_layers[0].first); diff --git a/modules/dnn/test/test_tf_importer.cpp b/modules/dnn/test/test_tf_importer.cpp index 3f02fb2220..66c43d6b9a 100644 --- a/modules/dnn/test/test_tf_importer.cpp +++ b/modules/dnn/test/test_tf_importer.cpp @@ -124,6 +124,7 @@ TEST_P(Test_TensorFlow_layers, conv) runTensorFlowNet("atrous_conv2d_valid", targetId); runTensorFlowNet("atrous_conv2d_same", targetId); runTensorFlowNet("depthwise_conv2d", targetId); + runTensorFlowNet("keras_atrous_conv2d_same", targetId); } TEST_P(Test_TensorFlow_layers, padding) From 2e9e71ab9e2d15d3907e5ed3dbdb985fd97ddb8c Mon Sep 17 00:00:00 2001 From: Tomoaki Teshima Date: Tue, 29 May 2018 19:18:10 +0900 Subject: [PATCH 
03/40] make ocl4dnn available to run on other platform than Intel GPU --- modules/dnn/src/layers/convolution_layer.cpp | 3 +- modules/dnn/src/layers/elementwise_layers.cpp | 3 +- modules/dnn/src/layers/mvn_layer.cpp | 47 +++++++++++++++---- modules/dnn/src/layers/pooling_layer.cpp | 3 +- modules/dnn/src/opencl/mvn.cl | 15 ++++-- modules/dnn/test/test_tf_importer.cpp | 4 +- 6 files changed, 53 insertions(+), 22 deletions(-) diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp index 96a9d5b0e3..edadcd9bff 100644 --- a/modules/dnn/src/layers/convolution_layer.cpp +++ b/modules/dnn/src/layers/convolution_layer.cpp @@ -966,8 +966,7 @@ public: CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(name, "name", name.c_str()); - CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget) && - OCL_PERFORMANCE_CHECK(ocl::Device::getDefault().isIntel()), + CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget), forward_ocl(inputs_arr, outputs_arr, internals_arr)) Layer::forward_fallback(inputs_arr, outputs_arr, internals_arr); diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp index a24b913ba4..f57ef01375 100644 --- a/modules/dnn/src/layers/elementwise_layers.cpp +++ b/modules/dnn/src/layers/elementwise_layers.cpp @@ -176,8 +176,7 @@ public: { CV_TRACE_FUNCTION(); - CV_OCL_RUN(IS_DNN_OPENCL_TARGET(this->preferableTarget) && - OCL_PERFORMANCE_CHECK(ocl::Device::getDefault().isIntel()), + CV_OCL_RUN(IS_DNN_OPENCL_TARGET(this->preferableTarget), func.applyOCL(inputs_arr, outputs_arr, internals_arr)) Layer::forward_fallback(inputs_arr, outputs_arr, internals_arr); diff --git a/modules/dnn/src/layers/mvn_layer.cpp b/modules/dnn/src/layers/mvn_layer.cpp index 647308ae0a..9e4f0ac39c 100644 --- a/modules/dnn/src/layers/mvn_layer.cpp +++ b/modules/dnn/src/layers/mvn_layer.cpp @@ -73,7 +73,7 @@ public: virtual bool tryFuse(Ptr& top) CV_OVERRIDE { - if (preferableTarget == DNN_TARGET_OPENCL && 
!fuse_batch_norm) + if (!fuse_batch_norm) { top->getScaleShift(scale, shift); fuse_batch_norm = !scale.empty() || !shift.empty(); @@ -252,8 +252,7 @@ public: CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(name, "name", name.c_str()); - CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget) && - OCL_PERFORMANCE_CHECK(ocl::Device::getDefault().isIntel()), + CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget), forward_ocl(inputs_arr, outputs_arr, internals_arr)) Layer::forward_fallback(inputs_arr, outputs_arr, internals_arr); @@ -274,25 +273,53 @@ public: for( i = 0; i < splitDim; i++ ) newRows *= inpBlob.size[i]; - if (inpBlob.total() == newRows) + Mat inpMat = inpBlob.reshape(1, newRows); + Mat outMat = outBlob.reshape(1, newRows); + + if ( inpBlob.total() == newRows ) { // MVN is applied to single values at an every row. - outBlob.setTo(0); + if (shift.empty()) + { + outBlob.setTo(0); + } + else + { + for ( i = 0; i < newRows; i++ ) + { + outMat.row(i).setTo(((float*)shift.data)[i]); + } + } return; } - Mat inpMat = inpBlob.reshape(1, newRows); - Mat outMat = outBlob.reshape(1, newRows); - Scalar mean, dev; for ( i = 0; i < newRows; i++) { Mat inpRow = inpMat.row(i); Mat outRow = outMat.row(i); - + float weight = 1.f; + float bias = 0.f; + if (fuse_batch_norm) + { + weight = i < scale.cols ? ((float*)scale.data)[i] : weight; + bias = i < shift.cols ? ((float*)shift.data)[i] : bias; + } cv::meanStdDev(inpRow, mean, (normVariance) ? dev : noArray()); double alpha = (normVariance) ? 
1/(eps + dev[0]) : 1; - inpRow.convertTo(outRow, outRow.type(), alpha, -mean[0] * alpha); + double normalizationScale = 1.0; + double normalizationShift = 0.0; + if (fuse_batch_norm) + { + normalizationScale = alpha * weight; + normalizationShift = -mean[0] * normalizationScale + bias; + } + else + { + normalizationScale = alpha; + normalizationShift = -mean[0] * alpha; + } + inpRow.convertTo(outRow, outRow.type(), normalizationScale, normalizationShift); } } } diff --git a/modules/dnn/src/layers/pooling_layer.cpp b/modules/dnn/src/layers/pooling_layer.cpp index 2bcce1d91e..548cb8acdd 100644 --- a/modules/dnn/src/layers/pooling_layer.cpp +++ b/modules/dnn/src/layers/pooling_layer.cpp @@ -191,8 +191,7 @@ public: CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(name, "name", name.c_str()); - CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget) && - OCL_PERFORMANCE_CHECK(ocl::Device::getDefault().isIntel()), + CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget), forward_ocl(inputs_arr, outputs_arr, internals_arr)) Layer::forward_fallback(inputs_arr, outputs_arr, internals_arr); diff --git a/modules/dnn/src/opencl/mvn.cl b/modules/dnn/src/opencl/mvn.cl index 49a8ebbe64..ffc81a8704 100644 --- a/modules/dnn/src/opencl/mvn.cl +++ b/modules/dnn/src/opencl/mvn.cl @@ -89,7 +89,8 @@ __kernel void CALC_MEAN(__global const Dtype* src, Dtype mean_val = mean[x]; vec_type src_vec = load(src, index); - vec_type dst_vec = native_powr(src_vec - (vec_type)mean_val, 2); + vec_type dst_vec = src_vec - (vec_type)mean_val; + dst_vec = dst_vec * dst_vec; store(dst_vec, dst, index); } @@ -197,10 +198,14 @@ __kernel void MEAN_FUSE(__global const T * A, const T4 a2 = vload4(i, src0_read + 2 * A_col_size); const T4 a3 = vload4(i, src0_read + 3 * A_col_size); - dot0 = native_powr(convert_float4(a0) - (Dtype4)sum.x, 2); - dot1 = native_powr(convert_float4(a1) - (Dtype4)sum.y, 2); - dot2 = native_powr(convert_float4(a2) - (Dtype4)sum.z, 2); - dot3 = native_powr(convert_float4(a3) - (Dtype4)sum.w, 2); + dot0 = 
convert_float4(a0) - (Dtype4)sum.x; + dot1 = convert_float4(a1) - (Dtype4)sum.y; + dot2 = convert_float4(a2) - (Dtype4)sum.z; + dot3 = convert_float4(a3) - (Dtype4)sum.w; + dot0 = dot0 * dot0; + dot1 = dot1 * dot1; + dot2 = dot2 * dot2; + dot3 = dot3 * dot3; vstore4(dot0, i, dst0_read); vstore4(dot1, i, dst0_read + A_col_size); diff --git a/modules/dnn/test/test_tf_importer.cpp b/modules/dnn/test/test_tf_importer.cpp index 84d224e33d..b3f4f4a7f9 100644 --- a/modules/dnn/test/test_tf_importer.cpp +++ b/modules/dnn/test/test_tf_importer.cpp @@ -160,10 +160,12 @@ TEST_P(Test_TensorFlow_layers, batch_norm) TEST_P(Test_TensorFlow_layers, pooling) { int targetId = GetParam(); + cv::ocl::Device d = cv::ocl::Device::getDefault(); + bool loosenFlag = targetId == DNN_TARGET_OPENCL && d.isIntel() && d.type() == cv::ocl::Device::TYPE_CPU; runTensorFlowNet("max_pool_even", targetId); runTensorFlowNet("max_pool_odd_valid", targetId); runTensorFlowNet("ave_pool_same", targetId); - runTensorFlowNet("max_pool_odd_same", targetId); + runTensorFlowNet("max_pool_odd_same", targetId, false, loosenFlag ? 3e-5 : 1e-5, loosenFlag ? 3e-4 : 1e-4); runTensorFlowNet("reduce_mean", targetId); // an average pooling over all spatial dimensions. } From 5128c1ff1fd4f872e7dd6a595ada0e7aee548dfe Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Tue, 29 May 2018 11:28:38 +0000 Subject: [PATCH 04/40] videoio(ffmpeg): specify stream->time_base Using codec->time_base is deprecated to specify muxer settings. Resolves issue with FPS value for AVI files with FFmpeg 4.0. 
Related FFmpeg commits: - https://github.com/FFmpeg/FFmpeg/commit/194be1f43ea391eb986732707435176e579265aa - https://github.com/FFmpeg/FFmpeg/commit/91736025b2807995e29bd0661807c1c84f515fef --- modules/videoio/src/cap_ffmpeg_impl.hpp | 3 +++ modules/videoio/test/test_ffmpeg.cpp | 2 ++ 2 files changed, 5 insertions(+) diff --git a/modules/videoio/src/cap_ffmpeg_impl.hpp b/modules/videoio/src/cap_ffmpeg_impl.hpp index 0317831d3a..af26218941 100644 --- a/modules/videoio/src/cap_ffmpeg_impl.hpp +++ b/modules/videoio/src/cap_ffmpeg_impl.hpp @@ -1587,6 +1587,9 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc, #if LIBAVCODEC_BUILD >= CALC_FFMPEG_VERSION(52, 42, 0) st->avg_frame_rate = (AVRational){frame_rate, frame_rate_base}; #endif +#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(55, 20, 0) + st->time_base = c->time_base; +#endif return st; } diff --git a/modules/videoio/test/test_ffmpeg.cpp b/modules/videoio/test/test_ffmpeg.cpp index 2baeeb8b41..6da1273353 100644 --- a/modules/videoio/test/test_ffmpeg.cpp +++ b/modules/videoio/test/test_ffmpeg.cpp @@ -357,6 +357,8 @@ public: for (unsigned int i = 0; i < frameCount && next; ++i) { + SCOPED_TRACE(cv::format("frame=%d", (int)frameCount)); + Mat actual; (*capture) >> actual; From cd9e43704e0082c0fdd4bdef2d2631daa531fe2e Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 21 May 2018 14:06:31 +0000 Subject: [PATCH 05/40] videoio: backends priority list --- modules/videoio/CMakeLists.txt | 2 + modules/videoio/src/cap.cpp | 673 +++------------------- modules/videoio/src/cap_mjpeg_encoder.cpp | 5 +- modules/videoio/src/precomp.hpp | 7 +- modules/videoio/src/videoio_c.cpp | 152 +++++ modules/videoio/src/videoio_registry.cpp | 637 ++++++++++++++++++++ modules/videoio/src/videoio_registry.hpp | 43 ++ modules/videoio/test/test_ffmpeg.cpp | 12 +- 8 files changed, 944 insertions(+), 587 deletions(-) create mode 100644 modules/videoio/src/videoio_c.cpp create mode 100644 
modules/videoio/src/videoio_registry.cpp create mode 100644 modules/videoio/src/videoio_registry.hpp diff --git a/modules/videoio/CMakeLists.txt b/modules/videoio/CMakeLists.txt index 02d2bd830f..c6fee91924 100644 --- a/modules/videoio/CMakeLists.txt +++ b/modules/videoio/CMakeLists.txt @@ -20,6 +20,8 @@ set(videoio_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/precomp.hpp ) set(videoio_srcs + ${CMAKE_CURRENT_LIST_DIR}/src/videoio_registry.cpp + ${CMAKE_CURRENT_LIST_DIR}/src/videoio_c.cpp ${CMAKE_CURRENT_LIST_DIR}/src/cap.cpp ${CMAKE_CURRENT_LIST_DIR}/src/cap_images.cpp ${CMAKE_CURRENT_LIST_DIR}/src/cap_mjpeg_encoder.cpp diff --git a/modules/videoio/src/cap.cpp b/modules/videoio/src/cap.cpp index 25e1208ad6..693f32e6f2 100644 --- a/modules/videoio/src/cap.cpp +++ b/modules/videoio/src/cap.cpp @@ -40,39 +40,10 @@ //M*/ #include "precomp.hpp" -#include -using namespace std; -#include "cap_intelperc.hpp" -#include "cap_dshow.hpp" - -#ifdef HAVE_MFX -#include "cap_mfx_reader.hpp" -#include "cap_mfx_writer.hpp" -#endif - -// All WinRT versions older than 8.0 should provide classes used for video support -#if defined(WINRT) && !defined(WINRT_8_0) && defined(__cplusplus_winrt) -# include "cap_winrt_capture.hpp" -# include "cap_winrt_bridge.hpp" -# define WINRT_VIDEO -#endif - -#if defined _M_X64 && defined _MSC_VER && !defined CV_ICC -#pragma optimize("",off) -#pragma warning(disable: 4748) -#endif -#if defined(__clang__) -#pragma clang diagnostic ignored "-Wimplicit-fallthrough" -#endif -#if defined(__GNUC__) && __GNUC__ >= 7 -#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" -#endif +#include "videoio_registry.hpp" -using namespace cv; - -namespace cv -{ +namespace cv { template<> void DefaultDeleter::operator ()(CvCapture* obj) const { cvReleaseCapture(&obj); } @@ -80,531 +51,7 @@ template<> void DefaultDeleter::operator ()(CvCapture* obj) const template<> void DefaultDeleter::operator ()(CvVideoWriter* obj) const { cvReleaseVideoWriter(&obj); } -} - 
-/************************* Reading AVIs & Camera data **************************/ - -static inline double icvGetCaptureProperty( const CvCapture* capture, int id ) -{ - return capture ? capture->getProperty(id) : 0; -} - -CV_IMPL void cvReleaseCapture( CvCapture** pcapture ) -{ - if( pcapture && *pcapture ) - { - delete *pcapture; - *pcapture = 0; - } -} - -CV_IMPL IplImage* cvQueryFrame( CvCapture* capture ) -{ - if(!capture) - return 0; - if(!capture->grabFrame()) - return 0; - return capture->retrieveFrame(0); -} - - -CV_IMPL int cvGrabFrame( CvCapture* capture ) -{ - return capture ? capture->grabFrame() : 0; -} - -CV_IMPL IplImage* cvRetrieveFrame( CvCapture* capture, int idx ) -{ - return capture ? capture->retrieveFrame(idx) : 0; -} - -CV_IMPL double cvGetCaptureProperty( CvCapture* capture, int id ) -{ - return icvGetCaptureProperty(capture, id); -} - -CV_IMPL int cvSetCaptureProperty( CvCapture* capture, int id, double value ) -{ - return capture ? capture->setProperty(id, value) : 0; -} - -CV_IMPL int cvGetCaptureDomain( CvCapture* capture) -{ - return capture ? capture->getCaptureDomain() : 0; -} - -static bool get_capture_debug_flag() -{ - static bool initialized = false; - static bool flag = false; - if (!initialized) - { -#ifndef NO_GETENV - flag = getenv("OPENCV_VIDEOCAPTURE_DEBUG") ? 
true : false; // TODO Use getBoolParameter -#endif - initialized = true; - } - return flag; -} - -#define TRY_OPEN(capture, backend_func) \ -{ \ - if (!capture) \ - CV_TRY { \ - if (get_capture_debug_flag()) fprintf(stderr, "VIDEOIO(%s): trying ...\n", #backend_func); \ - capture = backend_func; \ - if (get_capture_debug_flag()) fprintf(stderr, "VIDEOIO(%s): result=%p ...\n", #backend_func, capture); \ - } CV_CATCH (cv::Exception, e) { \ - fprintf(stderr, "VIDEOIO(%s): raised OpenCV exception:\n\n%s\n", #backend_func, e.what()); \ - } CV_CATCH (std::exception, e) { \ - fprintf(stderr, "VIDEOIO(%s): raised C++ exception:\n\n%s\n", #backend_func, e.what()); \ - } CV_CATCH_ALL { \ - fprintf(stderr, "VIDEOIO(%s): raised unknown C++ exception!\n\n", #backend_func); \ - } \ -} - - -/** - * Camera dispatching method: index is the camera number. - * If given an index from 0 to 99, it tries to find the first - * API that can access a given camera index. - * Add multiples of 100 to select an API. 
- */ -CV_IMPL CvCapture * cvCreateCameraCapture (int index) -{ - // interpret preferred interface (0 = autodetect) - int pref = (index / 100) * 100; - - // remove pref from index - index -= pref; - // local variable to memorize the captured device - CvCapture *capture = 0; - - switch (pref) - { - default: - // user specified an API we do not know - // bail out to let the user know that it is not available - if (pref) break; - - case CAP_VFW: // or CAP_V4L or CAP_V4L2 -#ifdef HAVE_VFW - TRY_OPEN(capture, cvCreateCameraCapture_VFW(index)) -#endif - -#if defined HAVE_LIBV4L || defined HAVE_CAMV4L || defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO - TRY_OPEN(capture, cvCreateCameraCapture_V4L(index)) -#endif - - if (pref) break; // CAP_VFW or CAP_V4L or CAP_V4L2 - - case CAP_FIREWIRE: -#ifdef HAVE_DC1394_2 - TRY_OPEN(capture, cvCreateCameraCapture_DC1394_2(index)) -#endif - -#ifdef HAVE_DC1394 - TRY_OPEN(capture, cvCreateCameraCapture_DC1394(index)) -#endif - -#ifdef HAVE_CMU1394 - TRY_OPEN(capture, cvCreateCameraCapture_CMU(index)) -#endif - - if (pref) break; // CAP_FIREWIRE - -#ifdef HAVE_MIL - case CAP_MIL: - TRY_OPEN(capture, cvCreateCameraCapture_MIL(index)) - if (pref) break; -#endif - -#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) - case CAP_QT: - TRY_OPEN(capture, cvCreateCameraCapture_QT(index)) - if (pref) break; -#endif - -#ifdef HAVE_UNICAP - case CAP_UNICAP: - TRY_OPEN(capture, cvCreateCameraCapture_Unicap(index)) - if (pref) break; -#endif - -#ifdef HAVE_PVAPI - case CAP_PVAPI: - TRY_OPEN(capture, cvCreateCameraCapture_PvAPI(index)) - if (pref) break; -#endif - -#ifdef HAVE_OPENNI - case CAP_OPENNI: - TRY_OPEN(capture, cvCreateCameraCapture_OpenNI(index)) - if (pref) break; -#endif - -#ifdef HAVE_OPENNI2 - case CAP_OPENNI2: - TRY_OPEN(capture, cvCreateCameraCapture_OpenNI2(index)) - if (pref) break; -#endif - -#ifdef HAVE_XIMEA - case CAP_XIAPI: - TRY_OPEN(capture, cvCreateCameraCapture_XIMEA(index)) - if (pref) break; -#endif - -#ifdef 
HAVE_AVFOUNDATION - case CAP_AVFOUNDATION: - TRY_OPEN(capture, cvCreateCameraCapture_AVFoundation(index)) - if (pref) break; -#endif - -#ifdef HAVE_GIGE_API - case CAP_GIGANETIX: - TRY_OPEN(capture, cvCreateCameraCapture_Giganetix(index)) - if (pref) break; // CAP_GIGANETIX -#endif - -#ifdef HAVE_ARAVIS_API - case CAP_ARAVIS: - TRY_OPEN(capture, cvCreateCameraCapture_Aravis(index)) - if (pref) break; -#endif - } - - return capture; -} - -/** - * Videoreader dispatching method: it tries to find the first - * API that can access a given filename. - */ -CV_IMPL CvCapture * cvCreateFileCaptureWithPreference (const char * filename, int apiPreference) -{ - CvCapture * result = 0; - - switch(apiPreference) { - default: - // user specified an API we do not know - // bail out to let the user know that it is not available - if (apiPreference) break; - -#if defined HAVE_LIBV4L || defined HAVE_CAMV4L || defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO - case CAP_V4L: - TRY_OPEN(result, cvCreateCameraCapture_V4L(filename)) - if (apiPreference) break; -#endif - -#ifdef HAVE_VFW - case CAP_VFW: - TRY_OPEN(result, cvCreateFileCapture_VFW (filename)) - if (apiPreference) break; -#endif - -#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) - case CAP_QT: - TRY_OPEN(result, cvCreateFileCapture_QT (filename)) - if (apiPreference) break; -#endif - -#ifdef HAVE_AVFOUNDATION - case CAP_AVFOUNDATION: - TRY_OPEN(result, cvCreateFileCapture_AVFoundation (filename)) - if (apiPreference) break; -#endif - -#ifdef HAVE_OPENNI - case CAP_OPENNI: - TRY_OPEN(result, cvCreateFileCapture_OpenNI (filename)) - if (apiPreference) break; -#endif - -#ifdef HAVE_OPENNI2 - case CAP_OPENNI2: - TRY_OPEN(result, cvCreateFileCapture_OpenNI2 (filename)) - if (apiPreference) break; -#endif -#ifdef HAVE_XIMEA - case CAP_XIAPI: - TRY_OPEN(result, cvCreateCameraCapture_XIMEA(filename)) - if (apiPreference) break; -#endif - case CAP_IMAGES: - TRY_OPEN(result, cvCreateFileCapture_Images (filename)) - } - - return result; 
-} - -CV_IMPL CvCapture * cvCreateFileCapture (const char * filename) -{ - return cvCreateFileCaptureWithPreference(filename, CAP_ANY); -} - -/** - * Videowriter dispatching method: it tries to find the first - * API that can write a given stream. - */ -static CvVideoWriter* cvCreateVideoWriterWithPreference(const char* filename, int apiPreference, int fourcc, - double fps, CvSize frameSize, int is_color ) -{ - CV_UNUSED(frameSize); - CV_UNUSED(is_color); - - CvVideoWriter *result = 0; - - if(!fourcc || !fps) - TRY_OPEN(result, cvCreateVideoWriter_Images(filename)) - - CV_Assert(result || fps != 0); - - switch(apiPreference) - { - default: - //exit if the specified API is unavaliable - if (apiPreference != CAP_ANY) break; - #ifdef HAVE_VFW - case CAP_VFW: - TRY_OPEN(result, cvCreateVideoWriter_VFW(filename, fourcc, fps, frameSize, is_color)) - if (apiPreference != CAP_ANY) break; - #endif - #ifdef HAVE_AVFOUNDATION - case CAP_AVFOUNDATION: - TRY_OPEN(result, cvCreateVideoWriter_AVFoundation(filename, fourcc, fps, frameSize, is_color)) - if (apiPreference != CAP_ANY) break; - #endif - #if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) - case(CAP_QT): - TRY_OPEN(result, cvCreateVideoWriter_QT(filename, fourcc, fps, frameSize, is_color)) - if (apiPreference != CAP_ANY) break; - #endif - #ifdef HAVE_GSTREAMER - case CAP_GSTREAMER: - TRY_OPEN(result, cvCreateVideoWriter_GStreamer (filename, fourcc, fps, frameSize, is_color)) - if (apiPreference != CAP_ANY) break; - #endif - case CAP_IMAGES: - TRY_OPEN(result, cvCreateVideoWriter_Images(filename)) - if (apiPreference != CAP_ANY) break; - } - - return result; -} - -CV_IMPL CvVideoWriter* cvCreateVideoWriter( const char* filename, int fourcc, - double fps, CvSize frameSize, int is_color ) -{ - return cvCreateVideoWriterWithPreference(filename, CAP_ANY, fourcc, fps, frameSize, is_color); -} - -CV_IMPL int cvWriteFrame( CvVideoWriter* writer, const IplImage* image ) -{ - return writer ? 
writer->writeFrame(image) : 0; -} - -CV_IMPL void cvReleaseVideoWriter( CvVideoWriter** pwriter ) -{ - if( pwriter && *pwriter ) - { - delete *pwriter; - *pwriter = 0; - } -} - -namespace cv -{ - -static Ptr IVideoCapture_create(int index) -{ - int domains[] = - { -#ifdef HAVE_GSTREAMER - CAP_GSTREAMER, -#endif -#ifdef HAVE_MSMF - CAP_MSMF, -#endif -#ifdef HAVE_DSHOW - CAP_DSHOW, -#endif -#ifdef HAVE_INTELPERC - CAP_INTELPERC, -#endif -#ifdef WINRT_VIDEO - CAP_WINRT, -#endif -#ifdef HAVE_GPHOTO2 - CAP_GPHOTO2, -#endif - -1, -1 - }; - - // interpret preferred interface (0 = autodetect) - int pref = (index / 100) * 100; - if (pref) - { - domains[0]=pref; - index %= 100; - domains[1]=-1; - } - - // try every possibly installed camera API - for (int i = 0; domains[i] >= 0; i++) - { -#if defined(HAVE_GSTREAMER) || \ - defined(HAVE_MSMF) || \ - defined(HAVE_DSHOW) || \ - defined(HAVE_INTELPERC) || \ - defined(WINRT_VIDEO) || \ - defined(HAVE_GPHOTO2) || \ - (0) - Ptr capture; - - switch (domains[i]) - { -#ifdef HAVE_GSTREAMER - case CAP_GSTREAMER: - capture = createGStreamerCapture(index); - break; -#endif -#ifdef HAVE_MSMF - case CAP_MSMF: - capture = cvCreateCapture_MSMF(index); - break; // CAP_MSMF -#endif -#ifdef HAVE_DSHOW - case CAP_DSHOW: - capture = makePtr(index); - break; // CAP_DSHOW -#endif -#ifdef HAVE_INTELPERC - case CAP_INTELPERC: - capture = makePtr(); - break; // CAP_INTEL_PERC -#endif -#ifdef WINRT_VIDEO - case CAP_WINRT: - capture = Ptr(new cv::VideoCapture_WinRT(index)); - if (capture) - return capture; - break; // CAP_WINRT -#endif -#ifdef HAVE_GPHOTO2 - case CAP_GPHOTO2: - capture = createGPhoto2Capture(index); - break; -#endif - } - if (capture && capture->isOpened()) - return capture; -#endif - } - - // failed open a camera - return Ptr(); -} - - -static Ptr IVideoCapture_create(const String& filename, int apiPreference) -{ - bool useAny = (apiPreference == CAP_ANY); - Ptr capture; -#ifdef HAVE_FFMPEG - if (useAny || apiPreference == CAP_FFMPEG) 
- { - capture = cvCreateFileCapture_FFMPEG_proxy(filename); - if (capture && capture->isOpened()) - return capture; - } -#endif -#ifdef HAVE_GSTREAMER - if (useAny || apiPreference == CAP_GSTREAMER) - { - capture = createGStreamerCapture(filename); - if (capture && capture->isOpened()) - return capture; - } -#endif -#ifdef HAVE_XINE - if (useAny || apiPreference == CAP_XINE) - { - capture = createXINECapture(filename.c_str()); - if (capture && capture->isOpened()) - return capture; - } -#endif -#ifdef HAVE_MSMF - if (useAny || apiPreference == CAP_MSMF) - { - capture = cvCreateCapture_MSMF(filename); - if (capture && capture->isOpened()) - return capture; - } -#endif -#ifdef HAVE_GPHOTO2 - if (useAny || apiPreference == CAP_GPHOTO2) - { - capture = createGPhoto2Capture(filename); - if (capture && capture->isOpened()) - return capture; - } -#endif -#ifdef HAVE_MFX - if (useAny || apiPreference == CAP_INTEL_MFX) - { - capture = makePtr(filename); - if (capture && capture->isOpened()) - return capture; - } -#endif - if (useAny || apiPreference == CAP_OPENCV_MJPEG) - { - capture = createMotionJpegCapture(filename); - if (capture && capture->isOpened()) - return capture; - } - if (capture && !capture->isOpened()) - capture.release(); - return capture; -} - -static Ptr IVideoWriter_create(const String& filename, int apiPreference, int _fourcc, double fps, Size frameSize, bool isColor) -{ - Ptr iwriter; -#ifdef HAVE_FFMPEG - if (apiPreference == CAP_FFMPEG || apiPreference == CAP_ANY) - { - iwriter = cvCreateVideoWriter_FFMPEG_proxy(filename, _fourcc, fps, frameSize, isColor); - if (!iwriter.empty()) - return iwriter; - } -#endif -#ifdef HAVE_MSMF - if (apiPreference == CAP_MSMF || apiPreference == CAP_ANY) - { - iwriter = cvCreateVideoWriter_MSMF(filename, _fourcc, fps, frameSize, isColor); - if (!iwriter.empty()) - return iwriter; - } -#endif -#ifdef HAVE_MFX - if (apiPreference == CAP_INTEL_MFX || apiPreference == CAP_ANY) - { - iwriter = 
VideoWriter_IntelMFX::create(filename, _fourcc, fps, frameSize, isColor); - if (!iwriter.empty()) - return iwriter; - } -#endif - - if( (apiPreference == CAP_OPENCV_MJPEG || apiPreference == CAP_ANY) - && _fourcc == CV_FOURCC('M', 'J', 'P', 'G') ) - iwriter = createMotionJpegWriter(filename, fps, frameSize, isColor); - - return iwriter; -} VideoCapture::VideoCapture() {} @@ -640,12 +87,30 @@ bool VideoCapture::open(const String& filename, int apiPreference) CV_TRACE_FUNCTION(); if (isOpened()) release(); - icap = IVideoCapture_create(filename, apiPreference); - if (!icap.empty()) - return true; - cap.reset(cvCreateFileCaptureWithPreference(filename.c_str(), apiPreference)); - return isOpened(); + const std::vector backends = cv::videoio_registry::getAvailableBackends_CaptureByFilename(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& info = backends[i]; + if (apiPreference == CAP_ANY || apiPreference == info.id) + { + CvCapture* capture = NULL; + VideoCapture_create(capture, icap, info.id, filename); + if (!icap.empty()) + { + if (icap->isOpened()) + return true; + icap.release(); + } + if (capture) + { + cap.reset(capture); + // assume it is opened + return true; + } + } + } + return false; } bool VideoCapture::open(const String& filename) @@ -655,28 +120,56 @@ bool VideoCapture::open(const String& filename) return open(filename, CAP_ANY); } -bool VideoCapture::open(int index) +bool VideoCapture::open(int cameraNum, int apiPreference) { CV_TRACE_FUNCTION(); if (isOpened()) release(); - icap = IVideoCapture_create(index); - if (!icap.empty()) - return true; - cap.reset(cvCreateCameraCapture(index)); - return isOpened(); + + const std::vector backends = cv::videoio_registry::getAvailableBackends_CaptureByIndex(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& info = backends[i]; + if (apiPreference == CAP_ANY || apiPreference == info.id) + { + CvCapture* capture = NULL; + VideoCapture_create(capture, icap, 
info.id, cameraNum); + if (!icap.empty()) + { + if (icap->isOpened()) + return true; + icap.release(); + } + if (capture) + { + cap.reset(capture); + // assume it is opened + return true; + } + } + } + return false; } -bool VideoCapture::open(int cameraNum, int apiPreference) + +bool VideoCapture::open(int index) { CV_TRACE_FUNCTION(); - cameraNum = cameraNum + apiPreference; - return open(cameraNum); + // interpret preferred interface (0 = autodetect) + int backendID = (index / 100) * 100; + if (backendID) + { + index %= 100; + } + + return open(index, backendID); } bool VideoCapture::isOpened() const { - return (!cap.empty() || !icap.empty()); + if (!icap.empty()) + return icap->isOpened(); + return !cap.empty(); // legacy interface doesn't support closed files } void VideoCapture::release() @@ -732,6 +225,7 @@ bool VideoCapture::read(OutputArray image) VideoCapture& VideoCapture::operator >> (Mat& image) { #ifdef WINRT_VIDEO + // FIXIT grab/retrieve methods() should work too if (grab()) { if (retrieve(image)) @@ -753,7 +247,6 @@ VideoCapture& VideoCapture::operator >> (Mat& image) #else read(image); #endif - return *this; } @@ -776,10 +269,14 @@ double VideoCapture::get(int propId) const { if (!icap.empty()) return icap->getProperty(propId); - return icvGetCaptureProperty(cap, propId); + return cap ? 
cap->getProperty(propId) : 0; } +//================================================================================================= + + + VideoWriter::VideoWriter() {} @@ -815,11 +312,30 @@ bool VideoWriter::open(const String& filename, int apiPreference, int _fourcc, d CV_INSTRUMENT_REGION() if (isOpened()) release(); - iwriter = IVideoWriter_create(filename, apiPreference, _fourcc, fps, frameSize, isColor); - if (!iwriter.empty()) - return true; - writer.reset(cvCreateVideoWriterWithPreference(filename.c_str(), apiPreference, _fourcc, fps, frameSize, isColor)); - return isOpened(); + + const std::vector backends = cv::videoio_registry::getAvailableBackends_Writer(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& info = backends[i]; + if (apiPreference == CAP_ANY || apiPreference == info.id) + { + CvVideoWriter* writer_ = NULL; + VideoWriter_create(writer_, iwriter, info.id, filename, _fourcc, fps, frameSize, isColor); + if (!iwriter.empty()) + { + if (iwriter->isOpened()) + return true; + iwriter.release(); + } + if (writer_) + { + // assume it is opened + writer.reset(writer_); + return true; + } + } + } + return false; } bool VideoWriter::isOpened() const @@ -863,9 +379,10 @@ VideoWriter& VideoWriter::operator << (const Mat& image) return *this; } +// FIXIT OpenCV 4.0: make inline int VideoWriter::fourcc(char c1, char c2, char c3, char c4) { return (c1 & 255) + ((c2 & 255) << 8) + ((c3 & 255) << 16) + ((c4 & 255) << 24); } -} +} // namespace diff --git a/modules/videoio/src/cap_mjpeg_encoder.cpp b/modules/videoio/src/cap_mjpeg_encoder.cpp index b564f608b9..fb1ded4997 100644 --- a/modules/videoio/src/cap_mjpeg_encoder.cpp +++ b/modules/videoio/src/cap_mjpeg_encoder.cpp @@ -1530,8 +1530,11 @@ void MotionJpegWriter::writeFrameData( const uchar* data, int step, int colorspa } -Ptr createMotionJpegWriter( const String& filename, double fps, Size frameSize, bool iscolor ) +Ptr createMotionJpegWriter(const String& filename, int fourcc, 
double fps, Size frameSize, bool iscolor) { + if (fourcc != CV_FOURCC('M', 'J', 'P', 'G')) + return Ptr(); + Ptr iwriter = makePtr(filename, fps, frameSize, iscolor); if( !iwriter->isOpened() ) iwriter.release(); diff --git a/modules/videoio/src/precomp.hpp b/modules/videoio/src/precomp.hpp index c08a224b05..a664aa7448 100644 --- a/modules/videoio/src/precomp.hpp +++ b/modules/videoio/src/precomp.hpp @@ -47,6 +47,9 @@ #include "opencv2/core/utility.hpp" #include "opencv2/core/private.hpp" +#include +#include + #include "opencv2/imgcodecs.hpp" #include "opencv2/imgproc.hpp" @@ -59,7 +62,7 @@ #include #include #include -#include +#include // FIXIT remove this #if defined _WIN32 || defined WINCE #if !defined _WIN32_WINNT @@ -178,7 +181,7 @@ namespace cv }; Ptr createMotionJpegCapture(const String& filename); - Ptr createMotionJpegWriter( const String& filename, double fps, Size frameSize, bool iscolor ); + Ptr createMotionJpegWriter(const String& filename, int fourcc, double fps, Size frameSize, bool iscolor); Ptr createGPhoto2Capture(int index); Ptr createGPhoto2Capture(const String& deviceName); diff --git a/modules/videoio/src/videoio_c.cpp b/modules/videoio/src/videoio_c.cpp new file mode 100644 index 0000000000..59a45225bd --- /dev/null +++ b/modules/videoio/src/videoio_c.cpp @@ -0,0 +1,152 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
+ +#include "precomp.hpp" + +#include "videoio_registry.hpp" + +using namespace cv; + +// Legacy C-like API + +CV_IMPL CvCapture* cvCreateCameraCapture(int index) +{ + // interpret preferred interface (0 = autodetect) + int apiPreference = (index / 100) * 100; + if (apiPreference) + { + index %= 100; + } + + const std::vector backends = cv::videoio_registry::getAvailableBackends_CaptureByIndex(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& info = backends[i]; + if (apiPreference == CAP_ANY || apiPreference == info.id) + { + CvCapture* capture = NULL; + Ptr icap; // unused + VideoCapture_create(capture, icap, info.id, index); + if (capture) + { + return capture; + } + if (!icap.empty()) + { + CV_LOG_WARNING(NULL, "cvCreateFileCaptureWithPreference: backend " << info.name << " doesn't support legacy API anymore.") + } + } + } + return NULL; +} + +CV_IMPL CvCapture* cvCreateFileCaptureWithPreference(const char* filename, int apiPreference) +{ + const std::vector backends = cv::videoio_registry::getAvailableBackends_CaptureByFilename(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& info = backends[i]; + if (apiPreference == CAP_ANY || apiPreference == info.id) + { + CvCapture* capture = NULL; + Ptr icap; // unused + VideoCapture_create(capture, icap, info.id, filename); + if (capture) + { + return capture; + } + if (!icap.empty()) + { + CV_LOG_WARNING(NULL, "cvCreateFileCaptureWithPreference: backend " << info.name << " doesn't support legacy API anymore.") + } + } + } + return NULL; +} + +CV_IMPL CvCapture* cvCreateFileCapture(const char * filename) +{ + return cvCreateFileCaptureWithPreference(filename, CAP_ANY); +} + +CV_IMPL CvVideoWriter* cvCreateVideoWriter(const char* filename, int fourcc, + double fps, CvSize frameSize, int is_color) +{ + const std::vector backends = cv::videoio_registry::getAvailableBackends_Writer(); + for (size_t i = 0; i < backends.size(); i++) + { + const VideoBackendInfo& 
info = backends[i]; + { + CvVideoWriter* writer_ = NULL; + Ptr iwriter; // unused + VideoWriter_create(writer_, iwriter, info.id, filename, fourcc, fps, frameSize, is_color != 0); + if (writer_) + { + return writer_; + } + if (!iwriter.empty()) + { + CV_LOG_WARNING(NULL, "cvCreateVideoWriter: backend " << info.name << " doesn't support legacy API anymore.") + } + } + } + return NULL; +} + +CV_IMPL int cvWriteFrame(CvVideoWriter* writer, const IplImage* image) +{ + return writer ? writer->writeFrame(image) : 0; +} + +CV_IMPL void cvReleaseVideoWriter(CvVideoWriter** pwriter) +{ + if( pwriter && *pwriter ) + { + delete *pwriter; + *pwriter = 0; + } +} + +CV_IMPL void cvReleaseCapture(CvCapture** pcapture) +{ + if (pcapture && *pcapture) + { + delete *pcapture; + *pcapture = 0; + } +} + +CV_IMPL IplImage* cvQueryFrame(CvCapture* capture) +{ + if (!capture) + return 0; + if (!capture->grabFrame()) + return 0; + return capture->retrieveFrame(0); +} + +CV_IMPL int cvGrabFrame(CvCapture* capture) +{ + return capture ? capture->grabFrame() : 0; +} + +CV_IMPL IplImage* cvRetrieveFrame(CvCapture* capture, int idx) +{ + return capture ? capture->retrieveFrame(idx) : 0; +} + +CV_IMPL double cvGetCaptureProperty(CvCapture* capture, int id) +{ + return capture ? capture->getProperty(id) : 0; +} + +CV_IMPL int cvSetCaptureProperty(CvCapture* capture, int id, double value) +{ + return capture ? capture->setProperty(id, value) : 0; +} + +CV_IMPL int cvGetCaptureDomain(CvCapture* capture) +{ + return capture ? capture->getCaptureDomain() : 0; +} diff --git a/modules/videoio/src/videoio_registry.cpp b/modules/videoio/src/videoio_registry.cpp new file mode 100644 index 0000000000..484ebe30bc --- /dev/null +++ b/modules/videoio/src/videoio_registry.cpp @@ -0,0 +1,637 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
+ +#include "precomp.hpp" + +#include "videoio_registry.hpp" + +#include "cap_intelperc.hpp" +#include "cap_dshow.hpp" + +#ifdef HAVE_MFX +#include "cap_mfx_reader.hpp" +#include "cap_mfx_writer.hpp" +#endif + +// All WinRT versions older than 8.0 should provide classes used for video support +#if defined(WINRT) && !defined(WINRT_8_0) && defined(__cplusplus_winrt) +# include "cap_winrt_capture.hpp" +# include "cap_winrt_bridge.hpp" +# define WINRT_VIDEO +#endif + +#if defined _M_X64 && defined _MSC_VER && !defined CV_ICC +#pragma optimize("",off) +#pragma warning(disable: 4748) +#endif + +using namespace cv; + +namespace cv +{ + +static bool param_VIDEOIO_DEBUG = utils::getConfigurationParameterBool("OPENCV_VIDEOIO_DEBUG", false); +static bool param_VIDEOCAPTURE_DEBUG = utils::getConfigurationParameterBool("OPENCV_VIDEOCAPTURE_DEBUG", false); +static bool param_VIDEOWRITER_DEBUG = utils::getConfigurationParameterBool("OPENCV_VIDEOWRITER_DEBUG", false); + +namespace { + +#define DECLARE_BACKEND(cap, name, mode) { cap, (BackendMode)(mode), 1000, name } + +/** Ordering guidelines: +- modern optimized, multi-platform libraries: ffmpeg, gstreamer, Media SDK +- platform specific universal SDK: WINRT, QTKIT/AVFOUNDATION, MSMF/VFW/DSHOW, V4L/V4L2 +- RGB-D: OpenNI/OpenNI2, INTELPERC/REALSENSE +- special OpenCV (file-based): "images", "mjpeg" +- special camera SDKs, including stereo: other special SDKs: FIREWIRE/1394, XIMEA/ARAVIS/GIGANETIX/PVAPI(GigE), UNICAP +- other: XINE, gphoto2, etc +*/ +static const struct VideoBackendInfo builtin_backends[] = +{ +#ifdef HAVE_FFMPEG + DECLARE_BACKEND(CAP_FFMPEG, "FFMPEG", MODE_CAPTURE_BY_FILENAME | MODE_WRITER), +#endif +#ifdef HAVE_GSTREAMER + DECLARE_BACKEND(CAP_GSTREAMER, "GSTREAMER", MODE_CAPTURE_ALL | MODE_WRITER), +#endif +#ifdef HAVE_MFX // Media SDK + DECLARE_BACKEND(CAP_INTEL_MFX, "INTEL_MFX", MODE_CAPTURE_BY_FILENAME | MODE_WRITER), +#endif + + + // Apple platform +#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) + 
DECLARE_BACKEND(CAP_QT, "QUICKTIME", MODE_CAPTURE_ALL | MODE_WRITER), +#endif +#ifdef HAVE_AVFOUNDATION + DECLARE_BACKEND(CAP_AVFOUNDATION, "AVFOUNDATION", MODE_CAPTURE_ALL | MODE_WRITER), +#endif + + // Windows +#ifdef WINRT_VIDEO + DECLARE_BACKEND(CAP_WINRT, "WINRT", MODE_CAPTURE_BY_FILENAME), +#endif +#ifdef HAVE_MSMF + DECLARE_BACKEND(CAP_MSMF, "MSMF", MODE_CAPTURE_ALL | MODE_WRITER), +#endif +#ifdef HAVE_VFW + DECLARE_BACKEND(CAP_VFW, "VFW", MODE_CAPTURE_ALL | MODE_WRITER), +#endif +#ifdef HAVE_DSHOW + DECLARE_BACKEND(CAP_DSHOW, "DSHOW", MODE_CAPTURE_ALL), +#endif + + // Linux, some Unix +#if defined HAVE_CAMV4L2 + DECLARE_BACKEND(CAP_V4L2, "V4L2", MODE_CAPTURE_ALL), +#elif defined HAVE_LIBV4L || defined HAVE_CAMV4L + DECLARE_BACKEND(CAP_V4L, "V4L", MODE_CAPTURE_ALL), +#endif + + + // RGB-D universal +#ifdef HAVE_OPENNI + DECLARE_BACKEND(CAP_OPENNI, "OPENNI", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_OPENNI2 + DECLARE_BACKEND(CAP_OPENNI2, "OPENNI2", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_INTELPERC + DECLARE_BACKEND(CAP_INTELPERC, "INTEL_PERC", MODE_CAPTURE_ALL), +#endif + + // OpenCV file-based only + DECLARE_BACKEND(CAP_IMAGES, "CV_IMAGES", MODE_CAPTURE_BY_FILENAME | MODE_WRITER), + DECLARE_BACKEND(CAP_OPENCV_MJPEG, "CV_MJPEG", MODE_CAPTURE_BY_FILENAME | MODE_WRITER), + + // special interfaces / stereo cameras / other SDKs +#if defined(HAVE_DC1394_2) || defined(HAVE_DC1394) || defined(HAVE_CMU1394) + DECLARE_BACKEND(CAP_FIREWIRE, "FIREWIRE", MODE_CAPTURE_ALL), +#endif + // GigE +#ifdef HAVE_PVAPI + DECLARE_BACKEND(CAP_PVAPI, "PVAPI", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_XIMEA + DECLARE_BACKEND(CAP_XIAPI, "XIMEA", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_GIGE_API + DECLARE_BACKEND(CAP_GIGANETIX, "GIGANETIX", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_ARAVIS_API + DECLARE_BACKEND(CAP_ARAVIS, "ARAVIS", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_UNICAP + DECLARE_BACKEND(CAP_UNICAP, "UNICAP", MODE_CAPTURE_BY_FILENAME), +#endif + +#ifdef HAVE_GPHOTO2 + 
DECLARE_BACKEND(CAP_GPHOTO2, "GPHOTO2", MODE_CAPTURE_ALL), +#endif +#ifdef HAVE_XINE + DECLARE_BACKEND(CAP_XINE, "XINE", MODE_CAPTURE_BY_FILENAME), +#endif + + // dropped backends: MIL, TYZX, Android +}; + +bool sortByPriority(const VideoBackendInfo &lhs, const VideoBackendInfo &rhs) +{ + return lhs.priority > rhs.priority; +} + +/** @brief Manages list of enabled backends + */ +class VideoBackendRegistry +{ +protected: + std::vector enabledBackends; + VideoBackendRegistry() + { + const int N = sizeof(builtin_backends)/sizeof(builtin_backends[0]); + enabledBackends.assign(builtin_backends, builtin_backends + N); + for (int i = 0; i < N; i++) + { + VideoBackendInfo& info = enabledBackends[i]; + info.priority = 1000 - i * 10; + } + CV_LOG_DEBUG(NULL, "VIDEOIO: Builtin backends(" << N << "): " << dumpBackends()); + if (readPrioritySettings()) + { + CV_LOG_INFO(NULL, "VIDEOIO: Updated backends priorities: " << dumpBackends()); + } + int enabled = 0; + for (int i = 0; i < N; i++) + { + VideoBackendInfo& info = enabledBackends[enabled]; + if (enabled != i) + info = enabledBackends[i]; + size_t param_priority = utils::getConfigurationParameterSizeT(cv::format("OPENCV_VIDEOIO_PRIORITY_%s", info.name).c_str(), (size_t)info.priority); + CV_Assert(param_priority == (size_t)(int)param_priority); // overflow check + if (param_priority > 0) + { + info.priority = (int)param_priority; + enabled++; + } + else + { + CV_LOG_INFO(NULL, "VIDEOIO: Disable backend: " << info.name); + } + } + enabledBackends.resize(enabled); + CV_LOG_DEBUG(NULL, "VIDEOIO: Available backends(" << enabled << "): " << dumpBackends()); + std::sort(enabledBackends.begin(), enabledBackends.end(), sortByPriority); + CV_LOG_INFO(NULL, "VIDEOIO: Enabled backends(" << enabled << ", sorted by priority): " << dumpBackends()); + } + + static std::vector tokenize_string(const std::string& input, char token) + { + std::vector result; + std::string::size_type prev_pos = 0, pos = 0; + while((pos = input.find(token, pos)) 
!= std::string::npos) + { + result.push_back(input.substr(prev_pos, pos-prev_pos)); + prev_pos = ++pos; + } + result.push_back(input.substr(prev_pos)); + return result; + } + bool readPrioritySettings() + { + bool hasChanges = false; + cv::String prioritized_backends = utils::getConfigurationParameterString("OPENCV_VIDEOIO_PRIORITY_LIST", NULL); + if (prioritized_backends.empty()) + return hasChanges; + CV_LOG_INFO(NULL, "VIDEOIO: Configured priority list (OPENCV_VIDEOIO_PRIORITY_LIST): " << prioritized_backends); + const std::vector names = tokenize_string(prioritized_backends, ','); + for (size_t i = 0; i < names.size(); i++) + { + const std::string& name = names[i]; + bool found = false; + for (size_t k = 0; k < enabledBackends.size(); k++) + { + VideoBackendInfo& info = enabledBackends[k]; + if (name == info.name) + { + info.priority = (int)(100000 + (names.size() - i) * 1000); + CV_LOG_DEBUG(NULL, "VIDEOIO: New backend priority: '" << name << "' => " << info.priority); + found = true; + hasChanges = true; + break; + } + } + if (!found) + { + CV_LOG_WARNING(NULL, "VIDEOIO: Can't prioritize unknown/unavailable backend: '" << name << "'"); + } + } + return hasChanges; + } +public: + std::string dumpBackends() const + { + std::ostringstream os; + for (size_t i = 0; i < enabledBackends.size(); i++) + { + if (i > 0) os << "; "; + const VideoBackendInfo& info = enabledBackends[i]; + os << info.name << '(' << info.priority << ')'; + } + return os.str(); + } + + static VideoBackendRegistry& getInstance() + { + static VideoBackendRegistry g_instance; + return g_instance; + } + + inline std::vector getAvailableBackends_CaptureByIndex() const + { + std::vector result; + for (size_t i = 0; i < enabledBackends.size(); i++) + { + const VideoBackendInfo& info = enabledBackends[i]; + if (info.mode & MODE_CAPTURE_BY_INDEX) + result.push_back(info); + } + return result; + } + inline std::vector getAvailableBackends_CaptureByFilename() const + { + std::vector result; + for 
(size_t i = 0; i < enabledBackends.size(); i++) + { + const VideoBackendInfo& info = enabledBackends[i]; + if (info.mode & MODE_CAPTURE_BY_FILENAME) + result.push_back(info); + } + return result; + } + inline std::vector getAvailableBackends_Writer() const + { + std::vector result; + for (size_t i = 0; i < enabledBackends.size(); i++) + { + const VideoBackendInfo& info = enabledBackends[i]; + if (info.mode & MODE_WRITER) + result.push_back(info); + } + return result; + } +}; + +} // namespace + +namespace videoio_registry { + +std::vector getAvailableBackends_CaptureByIndex() +{ + const std::vector result = VideoBackendRegistry::getInstance().getAvailableBackends_CaptureByIndex(); + return result; +} +std::vector getAvailableBackends_CaptureByFilename() +{ + const std::vector result = VideoBackendRegistry::getInstance().getAvailableBackends_CaptureByFilename(); + return result; +} +std::vector getAvailableBackends_Writer() +{ + const std::vector result = VideoBackendRegistry::getInstance().getAvailableBackends_Writer(); + return result; +} + +} // namespace registry + +#define TRY_OPEN(backend_func) \ +{ \ + try { \ + if (param_VIDEOIO_DEBUG || param_VIDEOCAPTURE_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): trying ...\n", #backend_func)); \ + icap = backend_func; \ + if (param_VIDEOIO_DEBUG ||param_VIDEOCAPTURE_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): result=%p isOpened=%d ...\n", \ + #backend_func, icap.empty() ? NULL : icap.get(), icap.empty() ? -1: icap->isOpened())); \ + } catch(const cv::Exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised OpenCV exception:\n\n%s\n", #backend_func, e.what())); \ + } catch (const std::exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised C++ exception:\n\n%s\n", #backend_func, e.what())); \ + } catch(...) 
{ \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised unknown C++ exception!\n\n", #backend_func)); \ + } \ + break; \ +} + +#define TRY_OPEN_LEGACY(backend_func) \ +{ \ + try { \ + if (param_VIDEOIO_DEBUG || param_VIDEOCAPTURE_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): trying ...\n", #backend_func)); \ + capture = backend_func; \ + if (param_VIDEOIO_DEBUG || param_VIDEOCAPTURE_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): result=%p ...\n", #backend_func, capture)); \ + } catch(const cv::Exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised OpenCV exception:\n\n%s\n", #backend_func, e.what())); \ + } catch (const std::exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised C++ exception:\n\n%s\n", #backend_func, e.what())); \ + } catch(...) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised unknown C++ exception!\n\n", #backend_func)); \ + } \ + break; \ +} + + +void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCaptureAPIs api, int index) +{ + CV_UNUSED(capture); CV_UNUSED(icap); + switch (api) + { + default: + CV_LOG_WARNING(NULL, "VideoCapture(index=" << index << ") was built without support of requested backendID=" << (int)api); + break; +#ifdef HAVE_GSTREAMER + case CAP_GSTREAMER: + TRY_OPEN(createGStreamerCapture(index)); + break; +#endif +#ifdef HAVE_MSMF + case CAP_MSMF: + TRY_OPEN(cvCreateCapture_MSMF(index)); + break; +#endif +#ifdef HAVE_DSHOW + case CAP_DSHOW: + TRY_OPEN(makePtr(index)); + break; +#endif +#ifdef HAVE_INTELPERC + case CAP_INTELPERC: + TRY_OPEN(makePtr()); + break; +#endif +#ifdef WINRT_VIDEO + case CAP_WINRT: + TRY_OPEN(makePtr(index)); + break; +#endif +#ifdef HAVE_GPHOTO2 + case CAP_GPHOTO2: + TRY_OPEN(createGPhoto2Capture(index)); + break; +#endif + case CAP_VFW: // or CAP_V4L or CAP_V4L2 +#ifdef HAVE_VFW + TRY_OPEN_LEGACY(cvCreateCameraCapture_VFW(index)) +#endif +#if defined HAVE_LIBV4L || defined HAVE_CAMV4L || defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO 
+ TRY_OPEN_LEGACY(cvCreateCameraCapture_V4L(index)) +#endif + break; + case CAP_FIREWIRE: +#ifdef HAVE_DC1394_2 + TRY_OPEN_LEGACY(cvCreateCameraCapture_DC1394_2(index)) +#endif +#ifdef HAVE_DC1394 + TRY_OPEN_LEGACY(cvCreateCameraCapture_DC1394(index)) +#endif +#ifdef HAVE_CMU1394 + TRY_OPEN_LEGACY(cvCreateCameraCapture_CMU(index)) +#endif + break; // CAP_FIREWIRE +#ifdef HAVE_MIL + case CAP_MIL: + TRY_OPEN_LEGACY(cvCreateCameraCapture_MIL(index)) + break; +#endif +#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) + case CAP_QT: + TRY_OPEN_LEGACY(cvCreateCameraCapture_QT(index)) + break; +#endif +#ifdef HAVE_UNICAP + case CAP_UNICAP: + TRY_OPEN_LEGACY(cvCreateCameraCapture_Unicap(index)) + break; +#endif +#ifdef HAVE_PVAPI + case CAP_PVAPI: + TRY_OPEN_LEGACY(cvCreateCameraCapture_PvAPI(index)) + break; +#endif +#ifdef HAVE_OPENNI + case CAP_OPENNI: + TRY_OPEN_LEGACY(cvCreateCameraCapture_OpenNI(index)) + break; +#endif +#ifdef HAVE_OPENNI2 + case CAP_OPENNI2: + TRY_OPEN_LEGACY(cvCreateCameraCapture_OpenNI2(index)) + break; +#endif +#ifdef HAVE_XIMEA + case CAP_XIAPI: + TRY_OPEN_LEGACY(cvCreateCameraCapture_XIMEA(index)) + break; +#endif + +#ifdef HAVE_AVFOUNDATION + case CAP_AVFOUNDATION: + TRY_OPEN_LEGACY(cvCreateCameraCapture_AVFoundation(index)) + break; +#endif + +#ifdef HAVE_GIGE_API + case CAP_GIGANETIX: + TRY_OPEN_LEGACY(cvCreateCameraCapture_Giganetix(index)) + break; +#endif + +#ifdef HAVE_ARAVIS_API + case CAP_ARAVIS: + TRY_OPEN_LEGACY(cvCreateCameraCapture_Aravis(index)) + break; +#endif + } // switch (api) +} + +void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCaptureAPIs api, const cv::String& filename) +{ + switch (api) + { + default: + CV_LOG_WARNING(NULL, "VideoCapture(filename=" << filename << ") was built without support of requested backendID=" << (int)api); + break; +#if defined HAVE_LIBV4L || defined HAVE_CAMV4L || defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO + case CAP_V4L: + 
TRY_OPEN_LEGACY(cvCreateCameraCapture_V4L(filename.c_str())) + break; +#endif + +#ifdef HAVE_VFW + case CAP_VFW: + TRY_OPEN_LEGACY(cvCreateFileCapture_VFW(filename.c_str())) + break; +#endif + +#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) + case CAP_QT: + TRY_OPEN_LEGACY(cvCreateFileCapture_QT(filename.c_str())) + break; +#endif + +#ifdef HAVE_AVFOUNDATION + case CAP_AVFOUNDATION: + TRY_OPEN_LEGACY(cvCreateFileCapture_AVFoundation(filename.c_str())) + break; +#endif + +#ifdef HAVE_OPENNI + case CAP_OPENNI: + TRY_OPEN_LEGACY(cvCreateFileCapture_OpenNI(filename.c_str())) + break; +#endif + +#ifdef HAVE_OPENNI2 + case CAP_OPENNI2: + TRY_OPEN_LEGACY(cvCreateFileCapture_OpenNI2(filename.c_str())) + break; +#endif +#ifdef HAVE_XIMEA + case CAP_XIAPI: + TRY_OPEN_LEGACY(cvCreateCameraCapture_XIMEA(filename.c_str())) + break; +#endif + case CAP_IMAGES: + TRY_OPEN_LEGACY(cvCreateFileCapture_Images(filename.c_str())) + break; +#ifdef HAVE_FFMPEG + case CAP_FFMPEG: + TRY_OPEN(cvCreateFileCapture_FFMPEG_proxy(filename)) + break; +#endif +#ifdef HAVE_GSTREAMER + case CAP_GSTREAMER: + TRY_OPEN(createGStreamerCapture(filename)) + break; +#endif +#ifdef HAVE_XINE + case CAP_XINE: + TRY_OPEN(createXINECapture(filename.c_str())) + break; +#endif +#ifdef HAVE_MSMF + case CAP_MSMF: + TRY_OPEN(cvCreateCapture_MSMF(filename)) + break; +#endif +#ifdef HAVE_GPHOTO2 + case CAP_GPHOTO2: + TRY_OPEN(createGPhoto2Capture(filename)) + break; +#endif +#ifdef HAVE_MFX + case CAP_INTEL_MFX: + TRY_OPEN(makePtr(filename)) + break; +#endif + case CAP_OPENCV_MJPEG: + TRY_OPEN(createMotionJpegCapture(filename)) + break; + } // switch +} + + +void VideoWriter_create(CvVideoWriter*& writer, Ptr& iwriter, VideoCaptureAPIs api, + const String& filename, int fourcc, double fps, const Size& frameSize, bool isColor) +{ +#define CREATE_WRITER(backend_func) \ +{ \ + try { \ + if (param_VIDEOIO_DEBUG || param_VIDEOWRITER_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): trying ...\n", 
#backend_func)); \ + iwriter = backend_func; \ + if (param_VIDEOIO_DEBUG || param_VIDEOWRITER_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): result=%p isOpened=%d...\n", #backend_func, iwriter.empty() ? NULL : iwriter.get(), iwriter.empty() ? -1 : iwriter->isOpened())); \ + } catch(const cv::Exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised OpenCV exception:\n\n%s\n", #backend_func, e.what())); \ + } catch (const std::exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised C++ exception:\n\n%s\n", #backend_func, e.what())); \ + } catch(...) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised unknown C++ exception!\n\n", #backend_func)); \ + } \ + break; \ +} + +#define CREATE_WRITER_LEGACY(backend_func) \ +{ \ + try { \ + if (param_VIDEOIO_DEBUG || param_VIDEOWRITER_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): trying ...\n", #backend_func)); \ + writer = backend_func; \ + if (param_VIDEOIO_DEBUG || param_VIDEOWRITER_DEBUG) \ + CV_LOG_WARNING(NULL, cv::format("VIDEOIO(%s): result=%p...\n", #backend_func, writer)); \ + } catch(const cv::Exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised OpenCV exception:\n\n%s\n", #backend_func, e.what())); \ + } catch (const std::exception& e) { \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised C++ exception:\n\n%s\n", #backend_func, e.what())); \ + } catch(...) 
{ \ + CV_LOG_ERROR(NULL, cv::format("VIDEOIO(%s): raised unknown C++ exception!\n\n", #backend_func)); \ + } \ + break; \ +} + + switch (api) + { + default: + CV_LOG_ERROR(NULL, "Unknown VideoWriter backend (check getBuildInformation()): " << (int)api); + break; +#ifdef HAVE_FFMPEG + case CAP_FFMPEG: + CREATE_WRITER(cvCreateVideoWriter_FFMPEG_proxy(filename, fourcc, fps, frameSize, isColor)); + break; +#endif +#ifdef HAVE_MSMF + case CAP_MSMF: + CREATE_WRITER(cvCreateVideoWriter_MSMF(filename, fourcc, fps, frameSize, isColor)); + break; +#endif +#ifdef HAVE_MFX + case CAP_INTEL_MFX: + CREATE_WRITER(VideoWriter_IntelMFX::create(filename, fourcc, fps, frameSize, isColor)); + break; +#endif +#ifdef HAVE_VFW + case CAP_VFW: + CREATE_WRITER_LEGACY(cvCreateVideoWriter_VFW(filename.c_str(), fourcc, fps, frameSize, isColor)) + break; +#endif +#ifdef HAVE_AVFOUNDATION + case CAP_AVFOUNDATION: + CREATE_WRITER_LEGACY(cvCreateVideoWriter_AVFoundation(filename.c_str(), fourcc, fps, frameSize, isColor)) + break; +#endif +#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT) + case(CAP_QT): + CREATE_WRITER_LEGACY(cvCreateVideoWriter_QT(filename.c_str(), fourcc, fps, frameSize, isColor)) + break; +#endif +#ifdef HAVE_GSTREAMER +case CAP_GSTREAMER: + CREATE_WRITER_LEGACY(cvCreateVideoWriter_GStreamer (filename.c_str(), fourcc, fps, frameSize, isColor)) + break; +#endif + case CAP_OPENCV_MJPEG: + CREATE_WRITER(createMotionJpegWriter(filename, fourcc, fps, frameSize, isColor)); + break; + case CAP_IMAGES: + if(!fourcc || !fps) + { + CREATE_WRITER_LEGACY(cvCreateVideoWriter_Images(filename.c_str())); + } + break; + } // switch(api) +} + + +} // namespace diff --git a/modules/videoio/src/videoio_registry.hpp b/modules/videoio/src/videoio_registry.hpp new file mode 100644 index 0000000000..a6d4755bd2 --- /dev/null +++ b/modules/videoio/src/videoio_registry.hpp @@ -0,0 +1,43 @@ +// This file is part of OpenCV project. 
+// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. + +#ifndef __OPENCV_VIDEOIO_VIDEOIO_REGISTRY_HPP__ +#define __OPENCV_VIDEOIO_VIDEOIO_REGISTRY_HPP__ + +namespace cv +{ + +/** Capabilities bitmask */ +enum BackendMode { + MODE_CAPTURE_BY_INDEX = 1 << 0, //!< device index + MODE_CAPTURE_BY_FILENAME = 1 << 1, //!< filename or device path (v4l2) + MODE_WRITER = 1 << 4, //!< writer + + MODE_CAPTURE_ALL = MODE_CAPTURE_BY_INDEX + MODE_CAPTURE_BY_FILENAME, +}; + +struct VideoBackendInfo { + VideoCaptureAPIs id; + BackendMode mode; + int priority; // 1000- - default builtin priority + // 0 - disabled (OPENCV_VIDEOIO_PRIORITY_ = 0) + // >10000 - prioritized list (OPENCV_VIDEOIO_PRIORITY_LIST) + const char* name; +}; + +namespace videoio_registry { + +std::vector getAvailableBackends_CaptureByIndex(); +std::vector getAvailableBackends_CaptureByFilename(); +std::vector getAvailableBackends_Writer(); + +} // namespace + +void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCaptureAPIs api, int index); +void VideoCapture_create(CvCapture*& capture, Ptr& icap, VideoCaptureAPIs api, const cv::String& filename); +void VideoWriter_create(CvVideoWriter*& writer, Ptr& iwriter, VideoCaptureAPIs api, + const String& filename, int fourcc, double fps, const Size& frameSize, bool isColor); + +} // namespace +#endif // __OPENCV_VIDEOIO_VIDEOIO_REGISTRY_HPP__ diff --git a/modules/videoio/test/test_ffmpeg.cpp b/modules/videoio/test/test_ffmpeg.cpp index 2baeeb8b41..07bb0b7391 100644 --- a/modules/videoio/test/test_ffmpeg.cpp +++ b/modules/videoio/test/test_ffmpeg.cpp @@ -228,7 +228,7 @@ public: static std::string TmpDirectory; CreateVideoWriterInvoker(std::vector& _writers, std::vector& _files) : - ParallelLoopBody(), writers(&_writers), files(&_files) + writers(_writers), files(_files) { } @@ -240,16 +240,16 @@ public: stream << i << ".avi"; std::string fileName = 
tempfile(stream.str().c_str()); - files->operator[](i) = fileName; - writers->operator[](i) = new VideoWriter(fileName, CAP_FFMPEG, VideoWriter::fourcc('X','V','I','D'), 25.0f, FrameSize); + files[i] = fileName; + writers[i] = new VideoWriter(fileName, CAP_FFMPEG, VideoWriter::fourcc('X','V','I','D'), 25.0f, FrameSize); - CV_Assert(writers->operator[](i)->isOpened()); + CV_Assert(writers[i]->isOpened()); } } private: - std::vector* writers; - std::vector* files; + std::vector& writers; + std::vector& files; }; std::string CreateVideoWriterInvoker::TmpDirectory; From 83ec0f526c64fddf44b76a123fc86bb1b6224f9a Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Tue, 29 May 2018 12:40:52 +0000 Subject: [PATCH 06/40] videoio(ffmpeg): add option to enable debug logs --- modules/videoio/src/cap_ffmpeg_impl.hpp | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/modules/videoio/src/cap_ffmpeg_impl.hpp b/modules/videoio/src/cap_ffmpeg_impl.hpp index 0317831d3a..761a016549 100644 --- a/modules/videoio/src/cap_ffmpeg_impl.hpp +++ b/modules/videoio/src/cap_ffmpeg_impl.hpp @@ -754,6 +754,17 @@ private: AutoLock& operator = (const AutoLock&); // disabled }; +static void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vargs) +{ + static bool skip_header = false; + static int prev_level = -1; + (void)ptr; + if (!skip_header || level != prev_level) printf("[OPENCV:FFMPEG:%02d] ", level); + vprintf(fmt, vargs); + size_t fmt_len = strlen(fmt); + skip_header = fmt_len > 0 && fmt[fmt_len - 1] != '\n'; + prev_level = level; +} class InternalFFMpegRegister { @@ -773,7 +784,18 @@ public: /* register a callback function for synchronization */ av_lockmgr_register(&LockCallBack); - av_log_set_level(AV_LOG_ERROR); +#ifndef NO_GETENV + char* debug_option = getenv("OPENCV_FFMPEG_DEBUG"); + if (debug_option != NULL) + { + av_log_set_level(AV_LOG_VERBOSE); + av_log_set_callback(ffmpeg_log_callback); + } + else +#endif + { + 
av_log_set_level(AV_LOG_ERROR); + } _initialized = true; } From d70e01b63504e2577059dc09d75cd7390cddfff1 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 28 May 2018 12:19:06 +0300 Subject: [PATCH 07/40] calib3d: chess board - replace to cv::findContours() --- modules/calib3d/src/calibinit.cpp | 175 ++++++++++++++++++-- modules/core/include/opencv2/core/types.hpp | 14 ++ 2 files changed, 178 insertions(+), 11 deletions(-) diff --git a/modules/calib3d/src/calibinit.cpp b/modules/calib3d/src/calibinit.cpp index 90a831b079..4c2d9508d1 100644 --- a/modules/calib3d/src/calibinit.cpp +++ b/modules/calib3d/src/calibinit.cpp @@ -76,13 +76,17 @@ #include #include -using namespace cv; -using namespace std; +#include "opencv2/core/utility.hpp" +#include //#define ENABLE_TRIM_COL_ROW //#define DEBUG_CHESSBOARD +//#undef CV_LOG_STRIP_LEVEL +//#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_VERBOSE + 1 +#include + #ifdef DEBUG_CHESSBOARD static int PRINTF( const char* fmt, ... ) { @@ -94,17 +98,34 @@ static int PRINTF( const char* fmt, ... ) #define PRINTF(...) 
#endif +using namespace cv; +using namespace std; + //===================================================================================== // Implementation for the enhanced calibration object detection //===================================================================================== #define MAX_CONTOUR_APPROX 7 +#define USE_CV_FINDCONTOURS // switch between cv::findContours() and legacy C API +#ifdef USE_CV_FINDCONTOURS +struct QuadCountour { + Point pt[4]; + int parent_contour; + + QuadCountour(const Point pt_[4], int parent_contour_) : + parent_contour(parent_contour_) + { + pt[0] = pt_[0]; pt[1] = pt_[1]; pt[2] = pt_[2]; pt[3] = pt_[3]; + } +}; +#else struct CvContourEx { CV_CONTOUR_FIELDS() int counter; }; +#endif //===================================================================================== @@ -1736,7 +1757,6 @@ static int icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, CvMemStorage *storage, const cv::Mat & image_, int flags, int *max_quad_buf_size ) { - CvMat image_old(image_), *image = &image_old; int quad_count = 0; cv::Ptr temp_storage; @@ -1746,17 +1766,144 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, if( out_corners ) *out_corners = 0; + // empiric bound for minimal allowed perimeter for squares + int min_size = 25; //cvRound( image->cols * image->rows * .03 * 0.01 * 0.92 ); + + bool filterQuads = (flags & CALIB_CB_FILTER_QUADS) != 0; +#ifdef USE_CV_FINDCONTOURS // use cv::findContours + CV_UNUSED(storage); + + std::vector > contours; + std::vector hierarchy; + + cv::findContours(image_, contours, hierarchy, RETR_CCOMP, CHAIN_APPROX_SIMPLE); + + if (contours.empty()) + { + CV_LOG_DEBUG(NULL, "calib3d(chessboard): cv::findContours() returns no contours"); + *max_quad_buf_size = 0; + return 0; + } + + std::vector contour_child_counter(contours.size(), 0); + int boardIdx = -1; + + std::vector contour_quads; + + for (int idx = (int)(contours.size() - 1); idx >= 0; --idx) + { + int parentIdx = 
hierarchy[idx][3]; + if (hierarchy[idx][2] != -1 || parentIdx == -1) // holes only (no child contours and with parent) + continue; + const std::vector& contour = contours[idx]; + + Rect contour_rect = boundingRect(contour); + if (contour_rect.area() < min_size) + continue; + + std::vector approx_contour; + + const int min_approx_level = 1, max_approx_level = MAX_CONTOUR_APPROX; + for (int approx_level = min_approx_level; approx_level <= max_approx_level; approx_level++ ) + { + approxPolyDP(contour, approx_contour, (float)approx_level, true); + if (approx_contour.size() == 4) + break; + + // we call this again on its own output, because sometimes + // approxPoly() does not simplify as much as it should. + std::vector approx_contour_tmp; + std::swap(approx_contour, approx_contour_tmp); + approxPolyDP(approx_contour_tmp, approx_contour, (float)approx_level, true); + if (approx_contour.size() == 4) + break; + } + + // reject non-quadrangles + if (approx_contour.size() != 4) + continue; + if (!cv::isContourConvex(approx_contour)) + continue; + + cv::Point pt[4]; + for (int i = 0; i < 4; ++i) + pt[i] = approx_contour[i]; + CV_LOG_VERBOSE(NULL, 9, "... contours(" << contour_quads.size() << " added):" << pt[0] << " " << pt[1] << " " << pt[2] << " " << pt[3]); + + if (filterQuads) + { + double p = cv::arcLength(approx_contour, true); + double area = cv::contourArea(approx_contour, false); + + double d1 = sqrt(normL2Sqr(pt[0] - pt[2])); + double d2 = sqrt(normL2Sqr(pt[1] - pt[3])); + + // philipg. 
Only accept those quadrangles which are more square + // than rectangular and which are big enough + double d3 = sqrt(normL2Sqr(pt[0] - pt[1])); + double d4 = sqrt(normL2Sqr(pt[1] - pt[2])); + if (!(d3*4 > d4 && d4*4 > d3 && d3*d4 < area*1.5 && area > min_size && + d1 >= 0.15 * p && d2 >= 0.15 * p)) + continue; + } + + contour_child_counter[parentIdx]++; + if (boardIdx != parentIdx && (boardIdx < 0 || contour_child_counter[boardIdx] < contour_child_counter[parentIdx])) + boardIdx = parentIdx; + + contour_quads.push_back(QuadCountour(pt, parentIdx)); + } + + size_t total = contour_quads.size(); + *max_quad_buf_size = (int)std::max((size_t)2, total * 3); + *out_quads = (CvCBQuad*)cvAlloc(*max_quad_buf_size * sizeof((*out_quads)[0])); + *out_corners = (CvCBCorner*)cvAlloc(*max_quad_buf_size * 4 * sizeof((*out_corners)[0])); + + // Create array of quads structures + for(int idx = 0; idx < (int)contour_quads.size(); idx++ ) + { + CvCBQuad* q = &(*out_quads)[quad_count]; + + QuadCountour& qc = contour_quads[idx]; + if (filterQuads && qc.parent_contour != boardIdx) + continue; + + // reset group ID + memset(q, 0, sizeof(*q)); + q->group_idx = -1; + for (int i = 0; i < 4; ++i) + { + CvCBCorner* corner = &(*out_corners)[quad_count*4 + i]; + + memset(corner, 0, sizeof(*corner)); + corner->pt = qc.pt[i]; + q->corners[i] = corner; + } + q->edge_len = FLT_MAX; + for (int i = 0; i < 4; ++i) + { + // TODO simplify with normL2Sqr() + float dx = q->corners[i]->pt.x - q->corners[(i+1)&3]->pt.x; + float dy = q->corners[i]->pt.y - q->corners[(i+1)&3]->pt.y; + float d = dx*dx + dy*dy; + if (q->edge_len > d) + q->edge_len = d; + } + quad_count++; + } + +#else // use legacy API: cvStartFindContours / cvFindNextContour / cvEndFindContours + + CvMat image_old(image_), *image = &image_old; + CvSeq *src_contour = 0; CvSeq *root; CvContourEx* board = 0; CvContourScanner scanner; - int i, idx, min_size; + int i, idx; CV_Assert( out_corners && out_quads ); - // empiric bound for minimal allowed 
perimeter for squares - min_size = 25; //cvRound( image->cols * image->rows * .03 * 0.01 * 0.92 ); - // create temporary storage for contours and the sequence of pointers to found quadrangles temp_storage.reset(cvCreateChildMemStorage( storage )); root = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvSeq*), temp_storage ); @@ -1820,9 +1967,9 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, dx = pt[1].x - pt[2].x; dy = pt[1].y - pt[2].y; d4 = sqrt(dx*dx + dy*dy); - if( !(flags & CV_CALIB_CB_FILTER_QUADS) || + if (!filterQuads || (d3*4 > d4 && d4*4 > d3 && d3*d4 < area*1.5 && area > min_size && - d1 >= 0.15 * p && d2 >= 0.15 * p) ) + d1 >= 0.15 * p && d2 >= 0.15 * p)) { CvContourEx* parent = (CvContourEx*)(src_contour->v_prev); parent->counter++; @@ -1840,7 +1987,8 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, cvEndFindContours( &scanner ); // allocate quad & corner buffers - *max_quad_buf_size = MAX(1, (root->total+root->total / 2)) * 2; + int total = root->total; + *max_quad_buf_size = MAX(1, (total + total / 2)) * 2; *out_quads = (CvCBQuad*)cvAlloc(*max_quad_buf_size * sizeof((*out_quads)[0])); *out_corners = (CvCBCorner*)cvAlloc(*max_quad_buf_size * 4 * sizeof((*out_corners)[0])); @@ -1849,7 +1997,7 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, { CvCBQuad* q = &(*out_quads)[quad_count]; src_contour = *(CvSeq**)cvGetSeqElem( root, idx ); - if( (flags & CV_CALIB_CB_FILTER_QUADS) && src_contour->v_prev != (CvSeq*)board ) + if (filterQuads && src_contour->v_prev != (CvSeq*)board) continue; // reset group ID @@ -1878,6 +2026,11 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners, } quad_count++; } +#endif + + CV_LOG_VERBOSE(NULL, 3, "Total quad contours: " << total); + CV_LOG_VERBOSE(NULL, 3, "max_quad_buf_size=" << *max_quad_buf_size); + CV_LOG_VERBOSE(NULL, 3, "filtered quad_count=" << quad_count); return quad_count; } diff --git a/modules/core/include/opencv2/core/types.hpp 
b/modules/core/include/opencv2/core/types.hpp index 6d8782058a..503743470c 100644 --- a/modules/core/include/opencv2/core/types.hpp +++ b/modules/core/include/opencv2/core/types.hpp @@ -1376,6 +1376,20 @@ Point_<_Tp> operator / (const Point_<_Tp>& a, double b) } +template static inline _AccTp normL2Sqr(const Point_& pt); +template static inline _AccTp normL2Sqr(const Point_& pt); +template static inline _AccTp normL2Sqr(const Point_& pt); +template static inline _AccTp normL2Sqr(const Point_& pt); + +template<> inline int normL2Sqr(const Point_& pt) { return pt.dot(pt); } +template<> inline int64 normL2Sqr(const Point_& pt) { return pt.dot(pt); } +template<> inline float normL2Sqr(const Point_& pt) { return pt.dot(pt); } +template<> inline double normL2Sqr(const Point_& pt) { return pt.dot(pt); } + +template<> inline double normL2Sqr(const Point_& pt) { return pt.ddot(pt); } +template<> inline double normL2Sqr(const Point_& pt) { return pt.ddot(pt); } + + //////////////////////////////// 3D Point /////////////////////////////// From bf87a43185ea11be65b974627273f2a191d66873 Mon Sep 17 00:00:00 2001 From: Dmitry Kurtaev Date: Tue, 3 Apr 2018 18:28:05 +0300 Subject: [PATCH 08/40] Faster-RCNN object detection models from TensorFlow --- .../dnn/include/opencv2/dnn/all_layers.hpp | 6 + modules/dnn/src/init.cpp | 1 + .../dnn/src/layers/crop_and_resize_layer.cpp | 108 +++++++ .../dnn/src/layers/detection_output_layer.cpp | 5 +- modules/dnn/src/tensorflow/tf_importer.cpp | 31 +- modules/dnn/test/test_tf_importer.cpp | 16 + samples/dnn/README.md | 4 +- samples/dnn/tf_text_graph_faster_rcnn.py | 291 ++++++++++++++++++ 8 files changed, 457 insertions(+), 5 deletions(-) create mode 100644 modules/dnn/src/layers/crop_and_resize_layer.cpp create mode 100644 samples/dnn/tf_text_graph_faster_rcnn.py diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp index f2124dd516..ffb09a2b95 100644 --- 
a/modules/dnn/include/opencv2/dnn/all_layers.hpp +++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp @@ -581,6 +581,12 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN static Ptr create(const LayerParams& params); }; + class CV_EXPORTS CropAndResizeLayer : public Layer + { + public: + static Ptr create(const LayerParams& params); + }; + //! @} //! @} CV__DNN_EXPERIMENTAL_NS_END diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp index 28759daf2f..2bff16c4eb 100644 --- a/modules/dnn/src/init.cpp +++ b/modules/dnn/src/init.cpp @@ -84,6 +84,7 @@ void initializeLayerFactory() CV_DNN_REGISTER_LAYER_CLASS(Reshape, ReshapeLayer); CV_DNN_REGISTER_LAYER_CLASS(Flatten, FlattenLayer); CV_DNN_REGISTER_LAYER_CLASS(ResizeNearestNeighbor, ResizeNearestNeighborLayer); + CV_DNN_REGISTER_LAYER_CLASS(CropAndResize, CropAndResizeLayer); CV_DNN_REGISTER_LAYER_CLASS(Convolution, ConvolutionLayer); CV_DNN_REGISTER_LAYER_CLASS(Deconvolution, DeconvolutionLayer); diff --git a/modules/dnn/src/layers/crop_and_resize_layer.cpp b/modules/dnn/src/layers/crop_and_resize_layer.cpp new file mode 100644 index 0000000000..3f92a8488d --- /dev/null +++ b/modules/dnn/src/layers/crop_and_resize_layer.cpp @@ -0,0 +1,108 @@ +#include "../precomp.hpp" +#include "layers_common.hpp" + +namespace cv { namespace dnn { + +class CropAndResizeLayerImpl CV_FINAL : public CropAndResizeLayer +{ +public: + CropAndResizeLayerImpl(const LayerParams& params) + { + CV_Assert(params.has("width"), params.has("height")); + outWidth = params.get("width"); + outHeight = params.get("height"); + } + + bool getMemoryShapes(const std::vector &inputs, + const int requiredOutputs, + std::vector &outputs, + std::vector &internals) const CV_OVERRIDE + { + CV_Assert(inputs.size() == 2, inputs[0].size() == 4); + if (inputs[0][0] != 1) + CV_Error(Error::StsNotImplemented, ""); + outputs.resize(1, MatShape(4)); + outputs[0][0] = inputs[1][2]; // Number of bounding boxes. + outputs[0][1] = inputs[0][1]; // Number of channels. 
+ outputs[0][2] = outHeight; + outputs[0][3] = outWidth; + return false; + } + + void forward(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr) CV_OVERRIDE + { + CV_TRACE_FUNCTION(); + CV_TRACE_ARG_VALUE(name, "name", name.c_str()); + + Layer::forward_fallback(inputs_arr, outputs_arr, internals_arr); + } + + void forward(std::vector &inputs, std::vector &outputs, std::vector &internals) CV_OVERRIDE + { + CV_TRACE_FUNCTION(); + CV_TRACE_ARG_VALUE(name, "name", name.c_str()); + + Mat& inp = *inputs[0]; + Mat& out = outputs[0]; + Mat boxes = inputs[1]->reshape(1, inputs[1]->total() / 7); + const int numChannels = inp.size[1]; + const int inpHeight = inp.size[2]; + const int inpWidth = inp.size[3]; + const int inpSpatialSize = inpHeight * inpWidth; + const int outSpatialSize = outHeight * outWidth; + CV_Assert(inp.isContinuous(), out.isContinuous()); + + for (int b = 0; b < boxes.rows; ++b) + { + float* outDataBox = out.ptr(b); + float left = boxes.at(b, 3); + float top = boxes.at(b, 4); + float right = boxes.at(b, 5); + float bottom = boxes.at(b, 6); + float boxWidth = right - left; + float boxHeight = bottom - top; + + float heightScale = boxHeight * static_cast(inpHeight - 1) / (outHeight - 1); + float widthScale = boxWidth * static_cast(inpWidth - 1) / (outWidth - 1); + for (int y = 0; y < outHeight; ++y) + { + float input_y = top * (inpHeight - 1) + y * heightScale; + int y0 = static_cast(input_y); + const float* inpData_row0 = (float*)inp.data + y0 * inpWidth; + const float* inpData_row1 = (y0 + 1 < inpHeight) ? 
(inpData_row0 + inpWidth) : inpData_row0; + for (int x = 0; x < outWidth; ++x) + { + float input_x = left * (inpWidth - 1) + x * widthScale; + int x0 = static_cast(input_x); + int x1 = std::min(x0 + 1, inpWidth - 1); + + float* outData = outDataBox + y * outWidth + x; + const float* inpData_row0_c = inpData_row0; + const float* inpData_row1_c = inpData_row1; + for (int c = 0; c < numChannels; ++c) + { + *outData = inpData_row0_c[x0] + + (input_y - y0) * (inpData_row1_c[x0] - inpData_row0_c[x0]) + + (input_x - x0) * (inpData_row0_c[x1] - inpData_row0_c[x0] + + (input_y - y0) * (inpData_row1_c[x1] - inpData_row0_c[x1] - inpData_row1_c[x0] + inpData_row0_c[x0])); + + inpData_row0_c += inpSpatialSize; + inpData_row1_c += inpSpatialSize; + outData += outSpatialSize; + } + } + } + } + } + +private: + int outWidth, outHeight; +}; + +Ptr CropAndResizeLayer::create(const LayerParams& params) +{ + return Ptr(new CropAndResizeLayerImpl(params)); +} + +} // namespace dnn +} // namespace cv diff --git a/modules/dnn/src/layers/detection_output_layer.cpp b/modules/dnn/src/layers/detection_output_layer.cpp index 44f7b32853..ee1ad95e61 100644 --- a/modules/dnn/src/layers/detection_output_layer.cpp +++ b/modules/dnn/src/layers/detection_output_layer.cpp @@ -208,8 +208,9 @@ public: CV_Assert(inputs[0][0] == inputs[1][0]); int numPriors = inputs[2][2] / 4; - CV_Assert((numPriors * _numLocClasses * 4) == inputs[0][1]); - CV_Assert(int(numPriors * _numClasses) == inputs[1][1]); + CV_Assert((numPriors * _numLocClasses * 4) == total(inputs[0], 1)); + CV_Assert(int(numPriors * _numClasses) == total(inputs[1], 1)); + CV_Assert(inputs[2][1] == 1 + (int)(!_varianceEncodedInTarget)); // num() and channels() are 1. 
// Since the number of bboxes to be kept is unknown before nms, we manually diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp index bca150e3b5..f19daf9cc6 100644 --- a/modules/dnn/src/tensorflow/tf_importer.cpp +++ b/modules/dnn/src/tensorflow/tf_importer.cpp @@ -1094,9 +1094,9 @@ void TFImporter::populateNet(Net dstNet) CV_Assert(!begins.empty(), !sizes.empty(), begins.type() == CV_32SC1, sizes.type() == CV_32SC1); - if (begins.total() == 4) + if (begins.total() == 4 && data_layouts[name] == DATA_LAYOUT_NHWC) { - // Perhabs, we have an NHWC order. Swap it to NCHW. + // Swap NHWC parameters' order to NCHW. std::swap(*begins.ptr(0, 2), *begins.ptr(0, 3)); std::swap(*begins.ptr(0, 1), *begins.ptr(0, 2)); std::swap(*sizes.ptr(0, 2), *sizes.ptr(0, 3)); @@ -1176,6 +1176,9 @@ void TFImporter::populateNet(Net dstNet) layers_to_ignore.insert(next_layers[0].first); } + if (hasLayerAttr(layer, "axis")) + layerParams.set("axis", getLayerAttr(layer, "axis").i()); + id = dstNet.addLayer(name, "Scale", layerParams); } layer_id[name] = id; @@ -1547,6 +1550,10 @@ void TFImporter::populateNet(Net dstNet) layerParams.set("confidence_threshold", getLayerAttr(layer, "confidence_threshold").f()); if (hasLayerAttr(layer, "loc_pred_transposed")) layerParams.set("loc_pred_transposed", getLayerAttr(layer, "loc_pred_transposed").b()); + if (hasLayerAttr(layer, "clip")) + layerParams.set("clip", getLayerAttr(layer, "clip").b()); + if (hasLayerAttr(layer, "variance_encoded_in_target")) + layerParams.set("variance_encoded_in_target", getLayerAttr(layer, "variance_encoded_in_target").b()); int id = dstNet.addLayer(name, "DetectionOutput", layerParams); layer_id[name] = id; @@ -1563,6 +1570,26 @@ void TFImporter::populateNet(Net dstNet) layer_id[name] = id; connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size()); } + else if (type == "CropAndResize") + { + // op: "CropAndResize" + // input: "input" + // input: 
"boxes" + // input: "sizes" + CV_Assert(layer.input_size() == 3); + + Mat cropSize = getTensorContent(getConstBlob(layer, value_id, 2)); + CV_Assert(cropSize.type() == CV_32SC1, cropSize.total() == 2); + + layerParams.set("height", cropSize.at(0)); + layerParams.set("width", cropSize.at(1)); + + int id = dstNet.addLayer(name, "CropAndResize", layerParams); + layer_id[name] = id; + + connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0); + connect(layer_id, dstNet, parsePin(layer.input(1)), id, 1); + } else if (type == "Mean") { Mat indices = getTensorContent(getConstBlob(layer, value_id, 1)); diff --git a/modules/dnn/test/test_tf_importer.cpp b/modules/dnn/test/test_tf_importer.cpp index b090fd7a16..84205f72fb 100644 --- a/modules/dnn/test/test_tf_importer.cpp +++ b/modules/dnn/test/test_tf_importer.cpp @@ -270,6 +270,22 @@ TEST_P(Test_TensorFlow_nets, Inception_v2_SSD) normAssertDetections(ref, out, "", 0.5); } +TEST_P(Test_TensorFlow_nets, Inception_v2_Faster_RCNN) +{ + std::string proto = findDataFile("dnn/faster_rcnn_inception_v2_coco_2018_01_28.pbtxt", false); + std::string model = findDataFile("dnn/faster_rcnn_inception_v2_coco_2018_01_28.pb", false); + + Net net = readNetFromTensorflow(model, proto); + Mat img = imread(findDataFile("dnn/dog416.png", false)); + Mat blob = blobFromImage(img, 1.0f / 127.5, Size(800, 600), Scalar(127.5, 127.5, 127.5), true, false); + + net.setInput(blob); + Mat out = net.forward(); + + Mat ref = blobFromNPY(findDataFile("dnn/tensorflow/faster_rcnn_inception_v2_coco_2018_01_28.detection_out.npy")); + normAssertDetections(ref, out, "", 0.3); +} + TEST_P(Test_TensorFlow_nets, opencv_face_detector_uint8) { std::string proto = findDataFile("dnn/opencv_face_detector.pbtxt", false); diff --git a/samples/dnn/README.md b/samples/dnn/README.md index c438bb0910..9072ddb2a8 100644 --- a/samples/dnn/README.md +++ b/samples/dnn/README.md @@ -11,8 +11,10 @@ | [SSDs from 
TensorFlow](https://github.com/tensorflow/models/tree/master/research/object_detection/) | `0.00784 (2/255)` | `300x300` | `127.5 127.5 127.5` | RGB | | [YOLO](https://pjreddie.com/darknet/yolo/) | `0.00392 (1/255)` | `416x416` | `0 0 0` | RGB | | [VGG16-SSD](https://github.com/weiliu89/caffe/tree/ssd) | `1.0` | `300x300` | `104 117 123` | BGR | -| [Faster-RCNN](https://github.com/rbgirshick/py-faster-rcnn) | `1.0` | `800x600` | `102.9801, 115.9465, 122.7717` | BGR | +| [Faster-RCNN](https://github.com/rbgirshick/py-faster-rcnn) | `1.0` | `800x600` | `102.9801 115.9465 122.7717` | BGR | | [R-FCN](https://github.com/YuwenXiong/py-R-FCN) | `1.0` | `800x600` | `102.9801 115.9465 122.7717` | BGR | +| [Faster-RCNN, ResNet backbone](https://github.com/tensorflow/models/tree/master/research/object_detection/) | `1.0` | `300x300` | `103.939 116.779 123.68` | RGB | +| [Faster-RCNN, InceptionV2 backbone](https://github.com/tensorflow/models/tree/master/research/object_detection/) | `0.00784 (2/255)` | `300x300` | `127.5 127.5 127.5` | RGB | #### Face detection [An origin model](https://github.com/opencv/opencv/tree/master/samples/dnn/face_detector) diff --git a/samples/dnn/tf_text_graph_faster_rcnn.py b/samples/dnn/tf_text_graph_faster_rcnn.py new file mode 100644 index 0000000000..7ad5de283a --- /dev/null +++ b/samples/dnn/tf_text_graph_faster_rcnn.py @@ -0,0 +1,291 @@ +import argparse +import numpy as np +import tensorflow as tf + +from tensorflow.core.framework.node_def_pb2 import NodeDef +from tensorflow.tools.graph_transforms import TransformGraph +from google.protobuf import text_format + +parser = argparse.ArgumentParser(description='Run this script to get a text graph of ' + 'SSD model from TensorFlow Object Detection API. 
' + 'Then pass it with .pb file to cv::dnn::readNetFromTensorflow function.') +parser.add_argument('--input', required=True, help='Path to frozen TensorFlow graph.') +parser.add_argument('--output', required=True, help='Path to output text graph.') +parser.add_argument('--num_classes', default=90, type=int, help='Number of trained classes.') +parser.add_argument('--scales', default=[0.25, 0.5, 1.0, 2.0], type=float, nargs='+', + help='Hyper-parameter of grid_anchor_generator from a config file.') +parser.add_argument('--aspect_ratios', default=[0.5, 1.0, 2.0], type=float, nargs='+', + help='Hyper-parameter of grid_anchor_generator from a config file.') +parser.add_argument('--features_stride', default=16, type=float, nargs='+', + help='Hyper-parameter from a config file.') +args = parser.parse_args() + +scopesToKeep = ('FirstStageFeatureExtractor', 'Conv', + 'FirstStageBoxPredictor/BoxEncodingPredictor', + 'FirstStageBoxPredictor/ClassPredictor', + 'CropAndResize', + 'MaxPool2D', + 'SecondStageFeatureExtractor', + 'SecondStageBoxPredictor', + 'image_tensor') + +scopesToIgnore = ('FirstStageFeatureExtractor/Assert', + 'FirstStageFeatureExtractor/Shape', + 'FirstStageFeatureExtractor/strided_slice', + 'FirstStageFeatureExtractor/GreaterEqual', + 'FirstStageFeatureExtractor/LogicalAnd') + +unusedAttrs = ['T', 'Tshape', 'N', 'Tidx', 'Tdim', 'use_cudnn_on_gpu', + 'Index', 'Tperm', 'is_training', 'Tpaddings'] + +# Read the graph. 
+with tf.gfile.FastGFile(args.input, 'rb') as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + +# Removes Identity nodes +def removeIdentity(): + identities = {} + for node in graph_def.node: + if node.op == 'Identity': + identities[node.name] = node.input[0] + graph_def.node.remove(node) + + for node in graph_def.node: + for i in range(len(node.input)): + if node.input[i] in identities: + node.input[i] = identities[node.input[i]] + +removeIdentity() + +removedNodes = [] + +for i in reversed(range(len(graph_def.node))): + op = graph_def.node[i].op + name = graph_def.node[i].name + + if op == 'Const' or name.startswith(scopesToIgnore) or not name.startswith(scopesToKeep): + if op != 'Const': + removedNodes.append(name) + + del graph_def.node[i] + else: + for attr in unusedAttrs: + if attr in graph_def.node[i].attr: + del graph_def.node[i].attr[attr] + +# Remove references to removed nodes except Const nodes. +for node in graph_def.node: + for i in reversed(range(len(node.input))): + if node.input[i] in removedNodes: + del node.input[i] + + +# Connect input node to the first layer +assert(graph_def.node[0].op == 'Placeholder') +graph_def.node[1].input.insert(0, graph_def.node[0].name) + +# Temporarily remove top nodes. 
+topNodes = [] +while True: + node = graph_def.node.pop() + topNodes.append(node) + if node.op == 'CropAndResize': + break + +def tensorMsg(values): + if all([isinstance(v, float) for v in values]): + dtype = 'DT_FLOAT' + field = 'float_val' + elif all([isinstance(v, int) for v in values]): + dtype = 'DT_INT32' + field = 'int_val' + else: + raise Exception('Wrong values types') + + msg = 'tensor { dtype: ' + dtype + ' tensor_shape { dim { size: %d } }' % len(values) + for value in values: + msg += '%s: %s ' % (field, str(value)) + return msg + '}' + +def addSlice(inp, out, begins, sizes): + beginsNode = NodeDef() + beginsNode.name = out + '/begins' + beginsNode.op = 'Const' + text_format.Merge(tensorMsg(begins), beginsNode.attr["value"]) + graph_def.node.extend([beginsNode]) + + sizesNode = NodeDef() + sizesNode.name = out + '/sizes' + sizesNode.op = 'Const' + text_format.Merge(tensorMsg(sizes), sizesNode.attr["value"]) + graph_def.node.extend([sizesNode]) + + sliced = NodeDef() + sliced.name = out + sliced.op = 'Slice' + sliced.input.append(inp) + sliced.input.append(beginsNode.name) + sliced.input.append(sizesNode.name) + graph_def.node.extend([sliced]) + +def addReshape(inp, out, shape): + shapeNode = NodeDef() + shapeNode.name = out + '/shape' + shapeNode.op = 'Const' + text_format.Merge(tensorMsg(shape), shapeNode.attr["value"]) + graph_def.node.extend([shapeNode]) + + reshape = NodeDef() + reshape.name = out + reshape.op = 'Reshape' + reshape.input.append(inp) + reshape.input.append(shapeNode.name) + graph_def.node.extend([reshape]) + +def addSoftMax(inp, out): + softmax = NodeDef() + softmax.name = out + softmax.op = 'Softmax' + text_format.Merge('i: -1', softmax.attr['axis']) + softmax.input.append(inp) + graph_def.node.extend([softmax]) + +addReshape('FirstStageBoxPredictor/ClassPredictor/BiasAdd', + 'FirstStageBoxPredictor/ClassPredictor/reshape_1', [0, -1, 2]) + +addSoftMax('FirstStageBoxPredictor/ClassPredictor/reshape_1', + 
'FirstStageBoxPredictor/ClassPredictor/softmax') # Compare with Reshape_4 + +flatten = NodeDef() +flatten.name = 'FirstStageBoxPredictor/BoxEncodingPredictor/flatten' # Compare with FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd +flatten.op = 'Flatten' +flatten.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd') +graph_def.node.extend([flatten]) + +proposals = NodeDef() +proposals.name = 'proposals' # Compare with ClipToWindow/Gather/Gather (NOTE: normalized) +proposals.op = 'PriorBox' +proposals.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/BiasAdd') +proposals.input.append(graph_def.node[0].name) # image_tensor + +text_format.Merge('b: false', proposals.attr["flip"]) +text_format.Merge('b: true', proposals.attr["clip"]) +text_format.Merge('f: %f' % args.features_stride, proposals.attr["step"]) +text_format.Merge('f: 0.0', proposals.attr["offset"]) +text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), proposals.attr["variance"]) + +widths = [] +heights = [] +for a in args.aspect_ratios: + for s in args.scales: + ar = np.sqrt(a) + heights.append((args.features_stride**2) * s / ar) + widths.append((args.features_stride**2) * s * ar) + +text_format.Merge(tensorMsg(widths), proposals.attr["width"]) +text_format.Merge(tensorMsg(heights), proposals.attr["height"]) + +graph_def.node.extend([proposals]) + +# Compare with Reshape_5 +detectionOut = NodeDef() +detectionOut.name = 'detection_out' +detectionOut.op = 'DetectionOutput' + +detectionOut.input.append('FirstStageBoxPredictor/BoxEncodingPredictor/flatten') +detectionOut.input.append('FirstStageBoxPredictor/ClassPredictor/softmax') +detectionOut.input.append('proposals') + +text_format.Merge('i: 2', detectionOut.attr['num_classes']) +text_format.Merge('b: true', detectionOut.attr['share_location']) +text_format.Merge('i: 0', detectionOut.attr['background_label_id']) +text_format.Merge('f: 0.7', detectionOut.attr['nms_threshold']) +text_format.Merge('i: 6000', detectionOut.attr['top_k']) 
+text_format.Merge('s: "CENTER_SIZE"', detectionOut.attr['code_type']) +text_format.Merge('i: 100', detectionOut.attr['keep_top_k']) +text_format.Merge('b: true', detectionOut.attr['clip']) +text_format.Merge('b: true', detectionOut.attr['loc_pred_transposed']) + +graph_def.node.extend([detectionOut]) + +# Save as text. +for node in reversed(topNodes): + graph_def.node.extend([node]) + +addSoftMax('SecondStageBoxPredictor/Reshape_1', 'SecondStageBoxPredictor/Reshape_1/softmax') + +addSlice('SecondStageBoxPredictor/Reshape_1/softmax', + 'SecondStageBoxPredictor/Reshape_1/slice', + [0, 0, 1], [-1, -1, -1]) + +addReshape('SecondStageBoxPredictor/Reshape_1/slice', + 'SecondStageBoxPredictor/Reshape_1/Reshape', [1, -1]) + +# Replace Flatten subgraph onto a single node. +for i in reversed(range(len(graph_def.node))): + if graph_def.node[i].op == 'CropAndResize': + graph_def.node[i].input.insert(1, 'detection_out') + + if graph_def.node[i].name == 'SecondStageBoxPredictor/Reshape': + shapeNode = NodeDef() + shapeNode.name = 'SecondStageBoxPredictor/Reshape/shape2' + shapeNode.op = 'Const' + text_format.Merge(tensorMsg([1, -1, 4]), shapeNode.attr["value"]) + graph_def.node.extend([shapeNode]) + + graph_def.node[i].input.pop() + graph_def.node[i].input.append(shapeNode.name) + + if graph_def.node[i].name in ['SecondStageBoxPredictor/Flatten/flatten/Shape', + 'SecondStageBoxPredictor/Flatten/flatten/strided_slice', + 'SecondStageBoxPredictor/Flatten/flatten/Reshape/shape']: + del graph_def.node[i] + +for node in graph_def.node: + if node.name == 'SecondStageBoxPredictor/Flatten/flatten/Reshape': + node.op = 'Flatten' + node.input.pop() + break + +################################################################################ +### Postprocessing +################################################################################ +addSlice('detection_out', 'detection_out/slice', [0, 0, 0, 3], [-1, -1, -1, 4]) + +variance = NodeDef() +variance.name = 'proposals/variance' 
+variance.op = 'Const' +text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), variance.attr["value"]) +graph_def.node.extend([variance]) + +varianceEncoder = NodeDef() +varianceEncoder.name = 'variance_encoded' +varianceEncoder.op = 'Mul' +varianceEncoder.input.append('SecondStageBoxPredictor/Reshape') +varianceEncoder.input.append(variance.name) +text_format.Merge('i: 2', varianceEncoder.attr["axis"]) +graph_def.node.extend([varianceEncoder]) + +addReshape('detection_out/slice', 'detection_out/slice/reshape', [1, 1, -1]) + +detectionOut = NodeDef() +detectionOut.name = 'detection_out_final' +detectionOut.op = 'DetectionOutput' + +detectionOut.input.append('variance_encoded') +detectionOut.input.append('SecondStageBoxPredictor/Reshape_1/Reshape') +detectionOut.input.append('detection_out/slice/reshape') + +text_format.Merge('i: %d' % args.num_classes, detectionOut.attr['num_classes']) +text_format.Merge('b: false', detectionOut.attr['share_location']) +text_format.Merge('i: %d' % (args.num_classes + 1), detectionOut.attr['background_label_id']) +text_format.Merge('f: 0.6', detectionOut.attr['nms_threshold']) +text_format.Merge('s: "CENTER_SIZE"', detectionOut.attr['code_type']) +text_format.Merge('i: 100', detectionOut.attr['keep_top_k']) +text_format.Merge('b: true', detectionOut.attr['loc_pred_transposed']) +text_format.Merge('b: true', detectionOut.attr['clip']) +text_format.Merge('b: true', detectionOut.attr['variance_encoded_in_target']) +graph_def.node.extend([detectionOut]) + +tf.train.write_graph(graph_def, "", args.output, as_text=True) From 1207300e01c7acd5078a370fe5bec40753d17dc3 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Wed, 30 May 2018 17:24:43 +0300 Subject: [PATCH 09/40] python: avoid direct cast PyCFunctionWithKeywords->PyCFunction --- modules/python/src2/cv2.cpp | 31 +++++++++++++++++++------------ modules/python/src2/gen2.py | 8 ++------ 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/modules/python/src2/cv2.cpp 
b/modules/python/src2/cv2.cpp index 4101022ec6..c36deae64d 100644 --- a/modules/python/src2/cv2.cpp +++ b/modules/python/src2/cv2.cpp @@ -10,6 +10,13 @@ #pragma warning(pop) #endif +#define CV_PY_FN_WITH_KW_(fn, flags) (PyCFunction)(void*)(PyCFunctionWithKeywords)(fn), (flags) | METH_VARARGS | METH_KEYWORDS +#define CV_PY_FN_NOARGS_(fn, flags) (PyCFunction)(fn), (flags) | METH_NOARGS + +#define CV_PY_FN_WITH_KW(fn) CV_PY_FN_WITH_KW_(fn, 0) +#define CV_PY_FN_NOARGS(fn) CV_PY_FN_NOARGS_(fn, 0) + + #define MODULESTR "cv2" #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION #include @@ -597,22 +604,22 @@ static PyObject * UMatWrapper_offset_getter(cv2_UMatWrapperObject* self, void*) } static PyMethodDef UMatWrapper_methods[] = { - {"get", (PyCFunction)UMatWrapper_get, METH_NOARGS, + {"get", CV_PY_FN_NOARGS(UMatWrapper_get), "Returns numpy array" }, - {"handle", (PyCFunction)UMatWrapper_handle, METH_VARARGS | METH_KEYWORDS, + {"handle", CV_PY_FN_WITH_KW(UMatWrapper_handle), "Returns UMat native handle" }, - {"isContinuous", (PyCFunction)UMatWrapper_isContinuous, METH_NOARGS, + {"isContinuous", CV_PY_FN_NOARGS(UMatWrapper_isContinuous), "Returns true if the matrix data is continuous" }, - {"isSubmatrix", (PyCFunction)UMatWrapper_isSubmatrix, METH_NOARGS, + {"isSubmatrix", CV_PY_FN_NOARGS(UMatWrapper_isSubmatrix), "Returns true if the matrix is a submatrix of another matrix" }, - {"context", (PyCFunction)UMatWrapper_context, METH_NOARGS | METH_STATIC, + {"context", CV_PY_FN_NOARGS_(UMatWrapper_context, METH_STATIC), "Returns OpenCL context handle" }, - {"queue", (PyCFunction)UMatWrapper_queue, METH_NOARGS | METH_STATIC, + {"queue", CV_PY_FN_NOARGS_(UMatWrapper_queue, METH_STATIC), "Returns OpenCL queue handle" }, {NULL, NULL, 0, NULL} /* Sentinel */ @@ -1778,15 +1785,15 @@ static int convert_to_char(PyObject *o, char *dst, const char *name = "no_name") #include "pyopencv_generated_funcs.h" static PyMethodDef special_methods[] = { - {"redirectError", 
(PyCFunction)pycvRedirectError, METH_VARARGS | METH_KEYWORDS, "redirectError(onError) -> None"}, + {"redirectError", CV_PY_FN_WITH_KW(pycvRedirectError), "redirectError(onError) -> None"}, #ifdef HAVE_OPENCV_HIGHGUI - {"createTrackbar", pycvCreateTrackbar, METH_VARARGS, "createTrackbar(trackbarName, windowName, value, count, onChange) -> None"}, - {"createButton", (PyCFunction)pycvCreateButton, METH_VARARGS | METH_KEYWORDS, "createButton(buttonName, onChange [, userData, buttonType, initialButtonState]) -> None"}, - {"setMouseCallback", (PyCFunction)pycvSetMouseCallback, METH_VARARGS | METH_KEYWORDS, "setMouseCallback(windowName, onMouse [, param]) -> None"}, + {"createTrackbar", (PyCFunction)pycvCreateTrackbar, METH_VARARGS, "createTrackbar(trackbarName, windowName, value, count, onChange) -> None"}, + {"createButton", CV_PY_FN_WITH_KW(pycvCreateButton), "createButton(buttonName, onChange [, userData, buttonType, initialButtonState]) -> None"}, + {"setMouseCallback", CV_PY_FN_WITH_KW(pycvSetMouseCallback), "setMouseCallback(windowName, onMouse [, param]) -> None"}, #endif #ifdef HAVE_OPENCV_DNN - {"dnn_registerLayer", (PyCFunction)pyopencv_cv_dnn_registerLayer, METH_VARARGS | METH_KEYWORDS, "registerLayer(type, class) -> None"}, - {"dnn_unregisterLayer", (PyCFunction)pyopencv_cv_dnn_unregisterLayer, METH_VARARGS | METH_KEYWORDS, "unregisterLayer(type) -> None"}, + {"dnn_registerLayer", CV_PY_FN_WITH_KW(pyopencv_cv_dnn_registerLayer), "registerLayer(type, class) -> None"}, + {"dnn_unregisterLayer", CV_PY_FN_WITH_KW(pyopencv_cv_dnn_unregisterLayer), "unregisterLayer(type) -> None"}, #endif {NULL, NULL}, }; diff --git a/modules/python/src2/gen2.py b/modules/python/src2/gen2.py index 27928bc485..6995e23651 100755 --- a/modules/python/src2/gen2.py +++ b/modules/python/src2/gen2.py @@ -599,13 +599,9 @@ class FuncInfo(object): # Convert unicode chars to xml representation, but keep as string instead of bytes full_docstring = full_docstring.encode('ascii', 
errors='xmlcharrefreplace').decode() - flags = ["METH_VARARGS", "METH_KEYWORDS"] - if self.isclassmethod: - flags.append("METH_CLASS") - - return Template(' {"$py_funcname", (PyCFunction)$wrap_funcname, $flags, "$py_docstring"},\n' + return Template(' {"$py_funcname", CV_PY_FN_WITH_KW_($wrap_funcname, $flags), "$py_docstring"},\n' ).substitute(py_funcname = self.variants[0].wname, wrap_funcname=self.get_wrapper_name(), - flags = " | ".join(flags), py_docstring = full_docstring) + flags = 'METH_CLASS' if self.isclassmethod else '0', py_docstring = full_docstring) def gen_code(self, codegen): all_classes = codegen.classes From c2d40b465d2179eedab1bd4b0d031fe3e585c5ea Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Wed, 30 May 2018 18:07:03 +0300 Subject: [PATCH 10/40] python: fix arg types of cv2_UMatWrapperObject functions --- modules/python/src2/cv2.cpp | 37 +++++++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/modules/python/src2/cv2.cpp b/modules/python/src2/cv2.cpp index c36deae64d..e16fcbacf2 100644 --- a/modules/python/src2/cv2.cpp +++ b/modules/python/src2/cv2.cpp @@ -481,8 +481,14 @@ typedef struct { static bool PyObject_IsUMat(PyObject *o); // UMatWrapper init - try to map arguments from python to UMat constructors -static int UMatWrapper_init(cv2_UMatWrapperObject *self, PyObject *args, PyObject *kwds) +static int UMatWrapper_init(PyObject* self_, PyObject *args, PyObject *kwds) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + { + PyErr_SetString(PyExc_TypeError, "Internal error"); + return -1; + } self->um = NULL; { // constructor () @@ -555,8 +561,11 @@ static void UMatWrapper_dealloc(cv2_UMatWrapperObject* self) // UMatWrapper.get() - returns numpy array by transferring UMat data to Mat and than wrapping it to numpy array // (using numpy allocator - and so without unnecessary copy) -static PyObject * UMatWrapper_get(cv2_UMatWrapperObject* self) +static PyObject * 
UMatWrapper_get(PyObject* self_, PyObject * /*args*/) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + return failmsgp("Incorrect type of self (must be 'cv2_UMatWrapperObject')"); Mat m; m.allocator = &g_numpyAllocator; self->um->copyTo(m); @@ -565,8 +574,11 @@ static PyObject * UMatWrapper_get(cv2_UMatWrapperObject* self) } // UMatWrapper.handle() - returns the OpenCL handle of the UMat object -static PyObject * UMatWrapper_handle(cv2_UMatWrapperObject* self, PyObject *args, PyObject *kwds) +static PyObject * UMatWrapper_handle(PyObject* self_, PyObject *args, PyObject *kwds) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + return failmsgp("Incorrect type of self (must be 'cv2_UMatWrapperObject')"); const char *kwlist[] = {"accessFlags", NULL}; int accessFlags; if (!PyArg_ParseTupleAndKeywords(args, kwds, "i", (char**) kwlist, &accessFlags)) @@ -575,31 +587,40 @@ static PyObject * UMatWrapper_handle(cv2_UMatWrapperObject* self, PyObject *args } // UMatWrapper.isContinuous() - returns true if the matrix data is continuous -static PyObject * UMatWrapper_isContinuous(cv2_UMatWrapperObject* self) +static PyObject * UMatWrapper_isContinuous(PyObject* self_, PyObject * /*args*/) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + return failmsgp("Incorrect type of self (must be 'cv2_UMatWrapperObject')"); return PyBool_FromLong(self->um->isContinuous()); } // UMatWrapper.isContinuous() - returns true if the matrix is a submatrix of another matrix -static PyObject * UMatWrapper_isSubmatrix(cv2_UMatWrapperObject* self) +static PyObject * UMatWrapper_isSubmatrix(PyObject* self_, PyObject * /*args*/) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + return failmsgp("Incorrect type of self (must be 'cv2_UMatWrapperObject')"); return PyBool_FromLong(self->um->isSubmatrix()); } // UMatWrapper.context() - returns the OpenCL context 
used by OpenCV UMat -static PyObject * UMatWrapper_context(cv2_UMatWrapperObject*) +static PyObject * UMatWrapper_context(PyObject* /*self_*/, PyObject * /*args*/) { return PyLong_FromVoidPtr(cv::ocl::Context::getDefault().ptr()); } // UMatWrapper.context() - returns the OpenCL queue used by OpenCV UMat -static PyObject * UMatWrapper_queue(cv2_UMatWrapperObject*) +static PyObject * UMatWrapper_queue(PyObject* /*self_*/, PyObject * /*args*/) { return PyLong_FromVoidPtr(cv::ocl::Queue::getDefault().ptr()); } -static PyObject * UMatWrapper_offset_getter(cv2_UMatWrapperObject* self, void*) +static PyObject * UMatWrapper_offset_getter(PyObject* self_, void*) { + cv2_UMatWrapperObject* self = (cv2_UMatWrapperObject*)self_; + if (self == NULL) + return failmsgp("Incorrect type of self (must be 'cv2_UMatWrapperObject')"); return PyLong_FromSsize_t(self->um->offset); } From 80770aacd781aafe76e2eb401bbe8a6a2a18761f Mon Sep 17 00:00:00 2001 From: Spark Echo Date: Thu, 31 May 2018 15:23:19 +0800 Subject: [PATCH 11/40] Merge pull request #11631 from sparkecho:3.4 Typo fixes (#11631) --- samples/dnn/face_detector/how_to_train_face_detector.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/dnn/face_detector/how_to_train_face_detector.txt b/samples/dnn/face_detector/how_to_train_face_detector.txt index 9c170fadd6..11602297f2 100644 --- a/samples/dnn/face_detector/how_to_train_face_detector.txt +++ b/samples/dnn/face_detector/how_to_train_face_detector.txt @@ -13,7 +13,7 @@ The data preparation pipeline can be represented as: a) Find some datasets with face bounding boxes annotation. For some reasons I can't provide links here, but you easily find them on your own. Also study the data. It may contain small or low quality faces which can spoil training process. Often there are special flags about object quality in annotation. 
Remove such faces from annotation (smaller when 16 along at least one side, or blurred, of highly-occluded, or something else). b) The downloaded dataset will have some format of annotation. It may be one single file for all images, or separate file for each image or something else. But to train SSD in Caffe you need to convert annotation to PASCAL VOC format. -PASCAL VOC annoitation consist of .xml file for each image. In this xml file all face bounding boxes should be listed as: +PASCAL VOC annotation consist of .xml file for each image. In this xml file all face bounding boxes should be listed as: @@ -42,7 +42,7 @@ PASCAL VOC annoitation consist of .xml file for each image. In this xml file all -So, convert your dataset's annotation to the fourmat above. +So, convert your dataset's annotation to the format above. Also, you should create labelmap.prototxt file with the following content: item { name: "none_of_the_above" @@ -76,4 +76,4 @@ mkdir -p log /path_for_caffe_build_dir/tools/caffe train -solver="solver.prototxt" -gpu 0 2>&1 | tee -a log/log.log And wait. It will take about 8 hours to finish the process. -After it you can use your .caffemodel from snapshot/ subdirectory in resnet_face_ssd_python.py sample. \ No newline at end of file +After it you can use your .caffemodel from snapshot/ subdirectory in resnet_face_ssd_python.py sample. 
From f96f934426a90e2f9d4fe2deb7ccd099c0255198 Mon Sep 17 00:00:00 2001 From: Dmitry Kurtaev Date: Thu, 31 May 2018 14:05:21 +0300 Subject: [PATCH 12/40] Update Intel's Inference Engine deep learning backend (#11587) * Update Intel's Inference Engine deep learning backend * Remove cpu_extension dependency * Update Darknet accuracy tests --- cmake/OpenCVDetectInferenceEngine.cmake | 6 +- modules/dnn/include/opencv2/dnn/dnn.hpp | 7 +- modules/dnn/perf/perf_net.cpp | 68 +++++++++++---- modules/dnn/src/darknet/darknet_io.cpp | 2 +- modules/dnn/src/dnn.cpp | 6 +- modules/dnn/src/layers/elementwise_layers.cpp | 10 ++- modules/dnn/src/layers/prior_box_layer.cpp | 33 ++++++-- modules/dnn/src/layers/region_layer.cpp | 5 -- .../layers/resize_nearest_neighbor_layer.cpp | 27 ++++++ modules/dnn/src/op_inf_engine.cpp | 47 +++++------ modules/dnn/test/test_backends.cpp | 83 +++++++++++++------ modules/dnn/test/test_common.hpp | 22 +++++ modules/dnn/test/test_darknet_importer.cpp | 63 +++++++++++--- modules/dnn/test/test_precomp.hpp | 2 +- samples/dnn/classification.cpp | 2 +- samples/dnn/classification.py | 2 +- samples/dnn/object_detection.cpp | 2 +- samples/dnn/object_detection.py | 2 +- samples/dnn/segmentation.cpp | 2 +- samples/dnn/segmentation.py | 2 +- 20 files changed, 280 insertions(+), 113 deletions(-) diff --git a/cmake/OpenCVDetectInferenceEngine.cmake b/cmake/OpenCVDetectInferenceEngine.cmake index d941ac2c8c..3f9318ff2d 100644 --- a/cmake/OpenCVDetectInferenceEngine.cmake +++ b/cmake/OpenCVDetectInferenceEngine.cmake @@ -41,8 +41,7 @@ set(INF_ENGINE_INCLUDE_DIRS "${INF_ENGINE_ROOT_DIR}/include" CACHE PATH "Path to if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}" - OR NOT EXISTS "${INF_ENGINE_INCLUDE_DIRS}" - OR NOT EXISTS "${INF_ENGINE_INCLUDE_DIRS}/inference_engine.hpp" + OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inference_engine.hpp" ) ie_fail() endif() @@ -52,10 +51,7 @@ set(INF_ENGINE_LIBRARIES "") set(ie_lib_list inference_engine) 
link_directories( - ${INTEL_CVSDK_DIR}/external/mklml_lnx/lib - ${INTEL_CVSDK_DIR}/inference_engine/external/mklml_lnx/lib ${INTEL_CVSDK_DIR}/inference_engine/external/mkltiny_lnx/lib - ${INTEL_CVSDK_DIR}/external/cldnn/lib ${INTEL_CVSDK_DIR}/inference_engine/external/cldnn/lib ) diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp index 6ac2f1a7fe..3a1108663c 100644 --- a/modules/dnn/include/opencv2/dnn/dnn.hpp +++ b/modules/dnn/include/opencv2/dnn/dnn.hpp @@ -81,7 +81,8 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN { DNN_TARGET_CPU, DNN_TARGET_OPENCL, - DNN_TARGET_OPENCL_FP16 + DNN_TARGET_OPENCL_FP16, + DNN_TARGET_MYRIAD }; /** @brief This class provides all data needed to initialize layer. @@ -700,13 +701,13 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * * `*.pb` (TensorFlow, https://www.tensorflow.org/) * * `*.t7` | `*.net` (Torch, http://torch.ch/) * * `*.weights` (Darknet, https://pjreddie.com/darknet/) - * * `*.bin` (DLDT, https://software.seek.intel.com/deep-learning-deployment) + * * `*.bin` (DLDT, https://software.intel.com/openvino-toolkit) * @param[in] config Text file contains network configuration. It could be a * file with the following extensions: * * `*.prototxt` (Caffe, http://caffe.berkeleyvision.org/) * * `*.pbtxt` (TensorFlow, https://www.tensorflow.org/) * * `*.cfg` (Darknet, https://pjreddie.com/darknet/) - * * `*.xml` (DLDT, https://software.seek.intel.com/deep-learning-deployment) + * * `*.xml` (DLDT, https://software.intel.com/openvino-toolkit) * @param[in] framework Explicit framework name tag to determine a format. * @returns Net object. 
* diff --git a/modules/dnn/perf/perf_net.cpp b/modules/dnn/perf/perf_net.cpp index c05a7088cd..aa4ac05881 100644 --- a/modules/dnn/perf/perf_net.cpp +++ b/modules/dnn/perf/perf_net.cpp @@ -13,7 +13,7 @@ namespace opencv_test { CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE, DNN_BACKEND_INFERENCE_ENGINE) -CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16) +CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16, DNN_TARGET_MYRIAD) class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple > { @@ -29,6 +29,28 @@ public: target = (dnn::Target)(int)get<1>(GetParam()); } + static bool checkMyriadTarget() + { +#ifndef HAVE_INF_ENGINE + return false; +#endif + cv::dnn::Net net; + cv::dnn::LayerParams lp; + net.addLayerToPrev("testLayer", "Identity", lp); + net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE); + net.setPreferableTarget(cv::dnn::DNN_TARGET_MYRIAD); + net.setInput(cv::Mat::zeros(1, 1, CV_32FC1)); + try + { + net.forward(); + } + catch(...) 
+ { + return false; + } + return true; + } + void processNet(std::string weights, std::string proto, std::string halide_scheduler, const Mat& input, const std::string& outputLayer = "") { @@ -41,6 +63,13 @@ public: throw cvtest::SkipTestException("OpenCL is not available/disabled in OpenCV"); } } + if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + { + if (!checkMyriadTarget()) + { + throw SkipTestException("Myriad is not available/disabled in OpenCV"); + } + } randu(input, 0.0f, 1.0f); @@ -87,8 +116,6 @@ public: PERF_TEST_P_(DNNTestNetwork, AlexNet) { - if (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) - throw SkipTestException(""); processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt", "alexnet.yml", Mat(cv::Size(227, 227), CV_32FC3)); } @@ -130,7 +157,6 @@ PERF_TEST_P_(DNNTestNetwork, ENet) PERF_TEST_P_(DNNTestNetwork, SSD) { - if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException(""); processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel", "dnn/ssd_vgg16.prototxt", "disabled", Mat(cv::Size(300, 300), CV_32FC3)); } @@ -146,18 +172,17 @@ PERF_TEST_P_(DNNTestNetwork, OpenFace) PERF_TEST_P_(DNNTestNetwork, MobileNet_SSD_Caffe) { - if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt", "", Mat(cv::Size(300, 300), CV_32FC3)); } +// TODO: update MobileNet model. 
PERF_TEST_P_(DNNTestNetwork, MobileNet_SSD_TensorFlow) { - if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL || - backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException(""); processNet("dnn/ssd_mobilenet_v1_coco.pb", "ssd_mobilenet_v1_coco.pbtxt", "", Mat(cv::Size(300, 300), CV_32FC3)); @@ -166,7 +191,8 @@ PERF_TEST_P_(DNNTestNetwork, MobileNet_SSD_TensorFlow) PERF_TEST_P_(DNNTestNetwork, DenseNet_121) { if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) + backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL_FP16 || + target == DNN_TARGET_MYRIAD)) throw SkipTestException(""); processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", "", Mat(cv::Size(224, 224), CV_32FC3)); @@ -174,21 +200,27 @@ PERF_TEST_P_(DNNTestNetwork, DenseNet_121) PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_coco) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt", "", Mat(cv::Size(368, 368), CV_32FC3)); } PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt", "", Mat(cv::Size(368, 368), CV_32FC3)); } PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && 
target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); // The same .caffemodel but modified .prototxt // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt", "", @@ -197,8 +229,7 @@ PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages) PERF_TEST_P_(DNNTestNetwork, opencv_face_detector) { - if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); processNet("dnn/opencv_face_detector.caffemodel", "dnn/opencv_face_detector.prototxt", "", Mat(cv::Size(300, 300), CV_32FC3)); @@ -207,7 +238,8 @@ PERF_TEST_P_(DNNTestNetwork, opencv_face_detector) PERF_TEST_P_(DNNTestNetwork, Inception_v2_SSD_TensorFlow) { if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL) || + (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)) throw SkipTestException(""); processNet("dnn/ssd_inception_v2_coco_2017_11_17.pb", "ssd_inception_v2_coco_2017_11_17.pbtxt", "", Mat(cv::Size(300, 300), CV_32FC3)); @@ -215,7 +247,8 @@ PERF_TEST_P_(DNNTestNetwork, Inception_v2_SSD_TensorFlow) PERF_TEST_P_(DNNTestNetwork, YOLOv3) { - if (backend != DNN_BACKEND_DEFAULT) + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) throw SkipTestException(""); Mat sample = imread(findDataFile("dnn/dog416.png", false)); Mat inp; @@ -232,6 +265,7 @@ const tuple testCases[] = { tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU), tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL), tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16), + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD), #endif tuple(DNN_BACKEND_DEFAULT, 
DNN_TARGET_CPU), tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL), diff --git a/modules/dnn/src/darknet/darknet_io.cpp b/modules/dnn/src/darknet/darknet_io.cpp index 71f762a09d..707cc29095 100644 --- a/modules/dnn/src/darknet/darknet_io.cpp +++ b/modules/dnn/src/darknet/darknet_io.cpp @@ -288,7 +288,7 @@ namespace cv { permute_params.set("order", paramOrder); darknet::LayerParameter lp; - std::string layer_name = cv::format("premute_%d", layer_id); + std::string layer_name = cv::format("permute_%d", layer_id); lp.layer_name = layer_name; lp.layer_type = permute_params.type; lp.layerParams = permute_params; diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index 1a1002c794..973c98abc3 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -1182,7 +1182,9 @@ struct Net::Impl for (it = layers.begin(); it != layers.end(); ++it) { LayerData &ld = it->second; - bool fused = ld.skip && ld.id != 0; + if (ld.id == 0) + continue; + bool fused = ld.skip; Ptr layer = ld.layerInstance; if (!layer->supportBackend(preferableBackend)) @@ -1259,7 +1261,7 @@ struct Net::Impl CV_Assert(!ieNode.empty()); ieNode->net = net; - if (preferableTarget == DNN_TARGET_OPENCL_FP16 && !fused) + if ((preferableTarget == DNN_TARGET_OPENCL_FP16 || preferableTarget == DNN_TARGET_MYRIAD) && !fused) { ieNode->layer->precision = InferenceEngine::Precision::FP16; auto weightableLayer = std::dynamic_pointer_cast(ieNode->layer); diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp index f57ef01375..32d39970ab 100644 --- a/modules/dnn/src/layers/elementwise_layers.cpp +++ b/modules/dnn/src/layers/elementwise_layers.cpp @@ -117,7 +117,7 @@ public: { return backendId == DNN_BACKEND_DEFAULT || backendId == DNN_BACKEND_HALIDE && haveHalide() || - backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() && this->type != "Sigmoid"; + backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine(); } virtual Ptr tryAttach(const Ptr& 
node) CV_OVERRIDE @@ -334,6 +334,7 @@ struct ReLUFunctor lp.type = "ReLU"; std::shared_ptr ieLayer(new InferenceEngine::ReLULayer(lp)); ieLayer->negative_slope = slope; + ieLayer->params["negative_slope"] = format("%f", slope); return ieLayer; } #endif // HAVE_INF_ENGINE @@ -431,6 +432,8 @@ struct ReLU6Functor std::shared_ptr ieLayer(new InferenceEngine::ClampLayer(lp)); ieLayer->min_value = minValue; ieLayer->max_value = maxValue; + ieLayer->params["min"] = format("%f", minValue); + ieLayer->params["max"] = format("%f", maxValue); return ieLayer; } #endif // HAVE_INF_ENGINE @@ -556,8 +559,9 @@ struct SigmoidFunctor #ifdef HAVE_INF_ENGINE InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) { - CV_Error(Error::StsNotImplemented, "Sigmoid"); - return InferenceEngine::CNNLayerPtr(); + lp.type = "Sigmoid"; + std::shared_ptr ieLayer(new InferenceEngine::CNNLayer(lp)); + return ieLayer; } #endif // HAVE_INF_ENGINE diff --git a/modules/dnn/src/layers/prior_box_layer.cpp b/modules/dnn/src/layers/prior_box_layer.cpp index b854c2602a..74c0d31f1d 100644 --- a/modules/dnn/src/layers/prior_box_layer.cpp +++ b/modules/dnn/src/layers/prior_box_layer.cpp @@ -271,7 +271,7 @@ public: virtual bool supportBackend(int backendId) CV_OVERRIDE { return backendId == DNN_BACKEND_DEFAULT || - backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() && !_explicitSizes; + backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine(); } bool getMemoryShapes(const std::vector &inputs, @@ -484,18 +484,33 @@ public: #ifdef HAVE_INF_ENGINE InferenceEngine::LayerParams lp; lp.name = name; - lp.type = "PriorBox"; + lp.type = _explicitSizes ? "PriorBoxClustered" : "PriorBox"; lp.precision = InferenceEngine::Precision::FP32; std::shared_ptr ieLayer(new InferenceEngine::CNNLayer(lp)); - ieLayer->params["min_size"] = format("%f", _minSize); - ieLayer->params["max_size"] = _maxSize > 0 ? 
format("%f", _maxSize) : ""; - - if (!_aspectRatios.empty()) + if (_explicitSizes) + { + CV_Assert(!_boxWidths.empty(), !_boxHeights.empty(), + _boxWidths.size() == _boxHeights.size()); + ieLayer->params["width"] = format("%f", _boxWidths[0]); + ieLayer->params["height"] = format("%f", _boxHeights[0]); + for (int i = 1; i < _boxWidths.size(); ++i) + { + ieLayer->params["width"] += format(",%f", _boxWidths[i]); + ieLayer->params["height"] += format(",%f", _boxHeights[i]); + } + } + else { - ieLayer->params["aspect_ratio"] = format("%f", _aspectRatios[0]); - for (int i = 1; i < _aspectRatios.size(); ++i) - ieLayer->params["aspect_ratio"] += format(",%f", _aspectRatios[i]); + ieLayer->params["min_size"] = format("%f", _minSize); + ieLayer->params["max_size"] = _maxSize > 0 ? format("%f", _maxSize) : ""; + + if (!_aspectRatios.empty()) + { + ieLayer->params["aspect_ratio"] = format("%f", _aspectRatios[0]); + for (int i = 1; i < _aspectRatios.size(); ++i) + ieLayer->params["aspect_ratio"] += format(",%f", _aspectRatios[i]); + } } ieLayer->params["flip"] = "0"; // We already flipped aspect ratios. 
diff --git a/modules/dnn/src/layers/region_layer.cpp b/modules/dnn/src/layers/region_layer.cpp index 125fa0d14d..50e68b2fa5 100644 --- a/modules/dnn/src/layers/region_layer.cpp +++ b/modules/dnn/src/layers/region_layer.cpp @@ -95,11 +95,6 @@ public: return false; } - virtual bool supportBackend(int backendId) CV_OVERRIDE - { - return backendId == DNN_BACKEND_DEFAULT; - } - float logistic_activate(float x) { return 1.F / (1.F + exp(-x)); } void softmax_activate(const float* input, const int n, const float temp, float* output) diff --git a/modules/dnn/src/layers/resize_nearest_neighbor_layer.cpp b/modules/dnn/src/layers/resize_nearest_neighbor_layer.cpp index e9a966296e..448ea25ee4 100644 --- a/modules/dnn/src/layers/resize_nearest_neighbor_layer.cpp +++ b/modules/dnn/src/layers/resize_nearest_neighbor_layer.cpp @@ -6,6 +6,7 @@ // Third party copyrights are property of their respective owners. #include "../precomp.hpp" #include "layers_common.hpp" +#include "../op_inf_engine.hpp" #include namespace cv { namespace dnn { @@ -39,6 +40,12 @@ public: return (outputs[0][2] == inputs[0][2]) && (outputs[0][3] == inputs[0][3]); } + virtual bool supportBackend(int backendId) CV_OVERRIDE + { + return backendId == DNN_BACKEND_DEFAULT || + backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine(); + } + virtual void finalize(const std::vector& inputs, std::vector &outputs) CV_OVERRIDE { if (!outWidth && !outHeight) @@ -75,6 +82,26 @@ public: } } } + + virtual Ptr initInfEngine(const std::vector >&) CV_OVERRIDE + { +#ifdef HAVE_INF_ENGINE + InferenceEngine::LayerParams lp; + lp.name = name; + lp.type = "Resample"; + lp.precision = InferenceEngine::Precision::FP32; + + std::shared_ptr ieLayer(new InferenceEngine::CNNLayer(lp)); + ieLayer->params["type"] = "caffe.ResampleParameter.NEAREST"; + ieLayer->params["antialias"] = "0"; + ieLayer->params["width"] = cv::format("%d", outWidth); + ieLayer->params["height"] = cv::format("%d", outHeight); + + return Ptr(new 
InfEngineBackendNode(ieLayer)); +#endif // HAVE_INF_ENGINE + return Ptr(); + } + private: int outWidth, outHeight, zoomFactor; bool alignCorners; diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp index c24b137107..710d6e5a88 100644 --- a/modules/dnn/src/op_inf_engine.cpp +++ b/modules/dnn/src/op_inf_engine.cpp @@ -18,11 +18,6 @@ namespace cv { namespace dnn { #ifdef HAVE_INF_ENGINE -static int infEngineVersion() -{ - return std::atoi(InferenceEngine::GetInferenceEngineVersion()->buildNumber); -} - InfEngineBackendNode::InfEngineBackendNode(const InferenceEngine::CNNLayerPtr& _layer) : BackendNode(DNN_BACKEND_INFERENCE_ENGINE), layer(_layer) {} @@ -59,27 +54,23 @@ infEngineWrappers(const std::vector >& ptrs) return wrappers; } +static InferenceEngine::Layout estimateLayout(const Mat& m) +{ + if (m.dims == 4) + return InferenceEngine::Layout::NCHW; + else if (m.dims == 2) + return InferenceEngine::Layout::NC; + else + return InferenceEngine::Layout::ANY; +} + static InferenceEngine::DataPtr wrapToInfEngineDataNode(const Mat& m, const std::string& name = "") { std::vector reversedShape(&m.size[0], &m.size[0] + m.dims); std::reverse(reversedShape.begin(), reversedShape.end()); - if (infEngineVersion() > 5855) - { - InferenceEngine::Layout l = InferenceEngine::Layout::ANY; - if (m.dims == 4) - l = InferenceEngine::Layout::NCHW; - else if (m.dims == 2) - l = InferenceEngine::Layout::NC; - return InferenceEngine::DataPtr( - new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32, l) - ); - } - else - { - return InferenceEngine::DataPtr( - new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32) - ); - } + return InferenceEngine::DataPtr( + new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32, estimateLayout(m)) + ); } InferenceEngine::TBlob::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector& shape, @@ -108,7 +99,7 @@ 
InfEngineBackendWrapper::InfEngineBackendWrapper(int targetId, const cv::Mat& m) : BackendWrapper(DNN_BACKEND_INFERENCE_ENGINE, targetId) { dataPtr = wrapToInfEngineDataNode(m); - blob = wrapToInfEngineBlob(m); + blob = wrapToInfEngineBlob(m, estimateLayout(m)); } InfEngineBackendWrapper::~InfEngineBackendWrapper() @@ -252,7 +243,8 @@ InfEngineBackendNet::getLayerByName(const char *layerName, InferenceEngine::CNNL void InfEngineBackendNet::setTargetDevice(InferenceEngine::TargetDevice device) noexcept { if (device != InferenceEngine::TargetDevice::eCPU && - device != InferenceEngine::TargetDevice::eGPU) + device != InferenceEngine::TargetDevice::eGPU && + device != InferenceEngine::TargetDevice::eMYRIAD) CV_Error(Error::StsNotImplemented, ""); targetDevice = device; } @@ -352,6 +344,11 @@ void InfEngineBackendNet::init(int targetId) case DNN_TARGET_CPU: setTargetDevice(InferenceEngine::TargetDevice::eCPU); break; case DNN_TARGET_OPENCL_FP16: setPrecision(InferenceEngine::Precision::FP16); // Fallback to the next. 
case DNN_TARGET_OPENCL: setTargetDevice(InferenceEngine::TargetDevice::eGPU); break; + case DNN_TARGET_MYRIAD: + { + setPrecision(InferenceEngine::Precision::FP16); + setTargetDevice(InferenceEngine::TargetDevice::eMYRIAD); break; + } default: CV_Error(Error::StsError, format("Unknown target identifier: %d", targetId)); } @@ -368,7 +365,7 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net) InferenceEngine::ResponseDesc resp; plugin = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice); - if (infEngineVersion() > 5855 && targetDevice == InferenceEngine::TargetDevice::eCPU) + if (targetDevice == InferenceEngine::TargetDevice::eCPU) { #ifdef _WIN32 InferenceEngine::IExtensionPtr extension = diff --git a/modules/dnn/test/test_backends.cpp b/modules/dnn/test/test_backends.cpp index 2bcd357e2e..8dd823e553 100644 --- a/modules/dnn/test/test_backends.cpp +++ b/modules/dnn/test/test_backends.cpp @@ -49,7 +49,14 @@ public: throw SkipTestException("OpenCL is not available/disabled in OpenCV"); } } - if (target == DNN_TARGET_OPENCL_FP16) + if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + { + if (!checkMyriadTarget()) + { + throw SkipTestException("Myriad is not available/disabled in OpenCV"); + } + } + if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) { l1 = l1 == 0.0 ? 4e-3 : l1; lInf = lInf == 0.0 ? 2e-2 : lInf; @@ -80,10 +87,7 @@ public: } Mat out = net.forward(outputLayer).clone(); - if (outputLayer == "detection_out") - normAssertDetections(outDefault, out, "First run", 0.2, l1, lInf); - else - normAssert(outDefault, out, "First run", l1, lInf); + check(outDefault, out, outputLayer, l1, lInf, "First run"); // Test 2: change input. 
float* inpData = (float*)inp.data; @@ -97,18 +101,33 @@ public: net.setInput(inp); outDefault = netDefault.forward(outputLayer).clone(); out = net.forward(outputLayer).clone(); + check(outDefault, out, outputLayer, l1, lInf, "Second run"); + } + void check(Mat& ref, Mat& out, const std::string& outputLayer, double l1, double lInf, const char* msg) + { if (outputLayer == "detection_out") - normAssertDetections(outDefault, out, "Second run", 0.2, l1, lInf); + { + if (backend == DNN_BACKEND_INFERENCE_ENGINE) + { + // Inference Engine produces detections terminated by a row which starts from -1. + out = out.reshape(1, out.total() / 7); + int numDetections = 0; + while (numDetections < out.rows && out.at(numDetections, 0) != -1) + { + numDetections += 1; + } + out = out.rowRange(0, numDetections); + } + normAssertDetections(ref, out, msg, 0.2, l1, lInf); + } else - normAssert(outDefault, out, "Second run", l1, lInf); + normAssert(ref, out, msg, l1, lInf); } }; TEST_P(DNNTestNetwork, AlexNet) { - if (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) - throw SkipTestException(""); processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt", Size(227, 227), "prob", target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_alexnet.yml" : @@ -158,8 +177,7 @@ TEST_P(DNNTestNetwork, ENet) TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe) { - if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); Mat sample = imread(findDataFile("dnn/street.png", false)); Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false); @@ -170,10 +188,11 @@ TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe) inp, "detection_out", "", l1, lInf); } +// TODO: update MobileNet model. 
TEST_P(DNNTestNetwork, MobileNet_SSD_TensorFlow) { if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException(""); Mat sample = imread(findDataFile("dnn/street.png", false)); Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false); @@ -185,31 +204,38 @@ TEST_P(DNNTestNetwork, MobileNet_SSD_TensorFlow) TEST_P(DNNTestNetwork, SSD_VGG16) { - if ((backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL_FP16) || - (backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU) || - (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)) + if (backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU) throw SkipTestException(""); + double scoreThreshold = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0252 : 0.0; + Mat sample = imread(findDataFile("dnn/street.png", false)); + Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false); processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel", - "dnn/ssd_vgg16.prototxt", Size(300, 300), "detection_out"); + "dnn/ssd_vgg16.prototxt", inp, "detection_out", "", scoreThreshold); } TEST_P(DNNTestNetwork, OpenPose_pose_coco) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt", Size(368, 368)); } TEST_P(DNNTestNetwork, OpenPose_pose_mpi) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt", Size(368, 368)); } TEST_P(DNNTestNetwork, 
OpenPose_pose_mpi_faster_4_stages) { - if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + if (backend == DNN_BACKEND_HALIDE || + backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + throw SkipTestException(""); // The same .caffemodel but modified .prototxt // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt", @@ -226,11 +252,13 @@ TEST_P(DNNTestNetwork, OpenFace) TEST_P(DNNTestNetwork, opencv_face_detector) { - if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + if (backend == DNN_BACKEND_HALIDE) throw SkipTestException(""); + Size inpSize; + if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD) + inpSize = Size(300, 300); Mat img = imread(findDataFile("gpu/lbpcascade/er.png", false)); - Mat inp = blobFromImage(img, 1.0, Size(), Scalar(104.0, 177.0, 123.0), false, false); + Mat inp = blobFromImage(img, 1.0, inpSize, Scalar(104.0, 177.0, 123.0), false, false); processNet("dnn/opencv_face_detector.caffemodel", "dnn/opencv_face_detector.prototxt", inp, "detection_out"); } @@ -238,12 +266,13 @@ TEST_P(DNNTestNetwork, opencv_face_detector) TEST_P(DNNTestNetwork, Inception_v2_SSD_TensorFlow) { if (backend == DNN_BACKEND_HALIDE || - backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) + (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL) || + (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)) throw SkipTestException(""); Mat sample = imread(findDataFile("dnn/street.png", false)); Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false); - float l1 = (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL_FP16) ? 
0.008 : 0.0; - float lInf = (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL_FP16) ? 0.07 : 0.0; + float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.008 : 0.0; + float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.07 : 0.0; processNet("dnn/ssd_inception_v2_coco_2017_11_17.pb", "dnn/ssd_inception_v2_coco_2017_11_17.pbtxt", inp, "detection_out", "", l1, lInf); } @@ -252,7 +281,8 @@ TEST_P(DNNTestNetwork, DenseNet_121) { if ((backend == DNN_BACKEND_HALIDE) || (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL_FP16) || - (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)) + (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL_FP16 || + target == DNN_TARGET_MYRIAD))) throw SkipTestException(""); processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", Size(224, 224), "", "caffe"); } @@ -266,6 +296,7 @@ const tuple testCases[] = { tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU), tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL), tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16), + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD), #endif tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL), tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL_FP16) diff --git a/modules/dnn/test/test_common.hpp b/modules/dnn/test/test_common.hpp index 872d19dce4..8e8ea74d83 100644 --- a/modules/dnn/test/test_common.hpp +++ b/modules/dnn/test/test_common.hpp @@ -147,6 +147,28 @@ inline void normAssertDetections(cv::Mat ref, cv::Mat out, const char *comment = testBoxes, comment, confThreshold, scores_diff, boxes_iou_diff); } +inline bool checkMyriadTarget() +{ +#ifndef HAVE_INF_ENGINE + return false; +#endif + cv::dnn::Net net; + cv::dnn::LayerParams lp; + net.addLayerToPrev("testLayer", "Identity", lp); + net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE); + net.setPreferableTarget(cv::dnn::DNN_TARGET_MYRIAD); + 
net.setInput(cv::Mat::zeros(1, 1, CV_32FC1)); + try + { + net.forward(); + } + catch(...) + { + return false; + } + return true; +} + inline bool readFileInMemory(const std::string& filename, std::string& content) { std::ios::openmode mode = std::ios::in | std::ios::binary; diff --git a/modules/dnn/test/test_darknet_importer.cpp b/modules/dnn/test/test_darknet_importer.cpp index a7679daf6f..11d2e50ef8 100644 --- a/modules/dnn/test/test_darknet_importer.cpp +++ b/modules/dnn/test/test_darknet_importer.cpp @@ -71,13 +71,31 @@ static void testDarknetModel(const std::string& cfg, const std::string& weights, const std::vector& refClassIds, const std::vector& refConfidences, const std::vector& refBoxes, - int targetId, float confThreshold = 0.24) + int backendId, int targetId, float scoreDiff = 0.0, + float iouDiff = 0.0, float confThreshold = 0.24) { + if (backendId == DNN_BACKEND_DEFAULT && targetId == DNN_TARGET_OPENCL) + { + #ifdef HAVE_OPENCL + if (!cv::ocl::useOpenCL()) + #endif + { + throw SkipTestException("OpenCL is not available/disabled in OpenCV"); + } + } + if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD) + { + if (!checkMyriadTarget()) + { + throw SkipTestException("Myriad is not available/disabled in OpenCV"); + } + } Mat sample = imread(_tf("dog416.png")); Mat inp = blobFromImage(sample, 1.0/255, Size(416, 416), Scalar(), true, false); Net net = readNet(findDataFile("dnn/" + cfg, false), findDataFile("dnn/" + weights, false)); + net.setPreferableBackend(backendId); net.setPreferableTarget(targetId); net.setInput(inp); std::vector outs; @@ -108,14 +126,17 @@ static void testDarknetModel(const std::string& cfg, const std::string& weights, } } normAssertDetections(refClassIds, refConfidences, refBoxes, classIds, - confidences, boxes, "", confThreshold, 8e-5, 3e-5); + confidences, boxes, "", confThreshold, scoreDiff, iouDiff); } -typedef testing::TestWithParam Test_Darknet_nets; +typedef testing::TestWithParam > 
Test_Darknet_nets; TEST_P(Test_Darknet_nets, YoloVoc) { - int targetId = GetParam(); + int backendId = get<0>(GetParam()); + int targetId = get<1>(GetParam()); + if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD) + throw SkipTestException(""); std::vector outNames(1, "detection_out"); std::vector classIds(3); @@ -124,26 +145,34 @@ TEST_P(Test_Darknet_nets, YoloVoc) classIds[0] = 6; confidences[0] = 0.750469f; boxes[0] = Rect2d(0.577374, 0.127391, 0.325575, 0.173418); // a car classIds[1] = 1; confidences[1] = 0.780879f; boxes[1] = Rect2d(0.270762, 0.264102, 0.461713, 0.48131); // a bycicle classIds[2] = 11; confidences[2] = 0.901615f; boxes[2] = Rect2d(0.1386, 0.338509, 0.282737, 0.60028); // a dog + double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 7e-3 : 8e-5; + double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.013 : 3e-5; testDarknetModel("yolo-voc.cfg", "yolo-voc.weights", outNames, - classIds, confidences, boxes, targetId); + classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff); } TEST_P(Test_Darknet_nets, TinyYoloVoc) { - int targetId = GetParam(); + int backendId = get<0>(GetParam()); + int targetId = get<1>(GetParam()); std::vector outNames(1, "detection_out"); std::vector classIds(2); std::vector confidences(2); std::vector boxes(2); classIds[0] = 6; confidences[0] = 0.761967f; boxes[0] = Rect2d(0.579042, 0.159161, 0.31544, 0.160779); // a car classIds[1] = 11; confidences[1] = 0.780595f; boxes[1] = Rect2d(0.129696, 0.386467, 0.315579, 0.534527); // a dog + double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 8e-3 : 8e-5; + double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 
8e-3 : 3e-5; testDarknetModel("tiny-yolo-voc.cfg", "tiny-yolo-voc.weights", outNames, - classIds, confidences, boxes, targetId); + classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff); } TEST_P(Test_Darknet_nets, YOLOv3) { - int targetId = GetParam(); + int backendId = get<0>(GetParam()); + int targetId = get<1>(GetParam()); + if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD) + throw SkipTestException(""); std::vector outNames(3); outNames[0] = "yolo_82"; outNames[1] = "yolo_94"; @@ -155,11 +184,25 @@ TEST_P(Test_Darknet_nets, YOLOv3) classIds[0] = 7; confidences[0] = 0.952983f; boxes[0] = Rect2d(0.614622, 0.150257, 0.286747, 0.138994); // a truck classIds[1] = 1; confidences[1] = 0.987908f; boxes[1] = Rect2d(0.150913, 0.221933, 0.591342, 0.524327); // a bycicle classIds[2] = 16; confidences[2] = 0.998836f; boxes[2] = Rect2d(0.160024, 0.389964, 0.257861, 0.553752); // a dog (COCO) + double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 4e-3 : 8e-5; + double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 
0.011 : 3e-5; testDarknetModel("yolov3.cfg", "yolov3.weights", outNames, - classIds, confidences, boxes, targetId); + classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff); } -INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, availableDnnTargets()); +const tuple testCases[] = { +#ifdef HAVE_INF_ENGINE + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU), + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL), + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16), + tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD), +#endif + tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_CPU), + tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL), + tuple(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL_FP16) +}; + +INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, testing::ValuesIn(testCases)); static void testDarknetLayer(const std::string& name, bool hasWeights = false) { diff --git a/modules/dnn/test/test_precomp.hpp b/modules/dnn/test/test_precomp.hpp index 54c9ce6c79..062308bf67 100644 --- a/modules/dnn/test/test_precomp.hpp +++ b/modules/dnn/test/test_precomp.hpp @@ -53,7 +53,7 @@ namespace opencv_test { using namespace cv::dnn; CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE, DNN_BACKEND_INFERENCE_ENGINE) -CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16) +CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16, DNN_TARGET_MYRIAD) static testing::internal::ParamGenerator availableDnnTargets() { diff --git a/samples/dnn/classification.cpp b/samples/dnn/classification.cpp index 9407326831..21e9520743 100644 --- a/samples/dnn/classification.cpp +++ b/samples/dnn/classification.cpp @@ -23,7 +23,7 @@ const char* keys = "{ backend | 0 | Choose one of computation backends: " "0: default C++ backend, " "1: Halide language (http://halide-lang.org/), " - "2: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)}" + "2: Intel's Deep Learning Inference Engine 
(https://software.intel.com/openvino-toolkit)}" "{ target | 0 | Choose one of target computation devices: " "0: CPU target (by default)," "1: OpenCL }"; diff --git a/samples/dnn/classification.py b/samples/dnn/classification.py index 2628195929..637309fe25 100644 --- a/samples/dnn/classification.py +++ b/samples/dnn/classification.py @@ -34,7 +34,7 @@ parser.add_argument('--backend', choices=backends, default=cv.dnn.DNN_BACKEND_DE help="Choose one of computation backends: " "%d: default C++ backend, " "%d: Halide language (http://halide-lang.org/), " - "%d: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)" % backends) + "%d: Intel's Deep Learning Inference Engine (https://software.intel.com/openvino-toolkit)" % backends) parser.add_argument('--target', choices=targets, default=cv.dnn.DNN_TARGET_CPU, type=int, help='Choose one of target computation devices: ' '%d: CPU target (by default), ' diff --git a/samples/dnn/object_detection.cpp b/samples/dnn/object_detection.cpp index 1298d7e39e..f2b761b387 100644 --- a/samples/dnn/object_detection.cpp +++ b/samples/dnn/object_detection.cpp @@ -25,7 +25,7 @@ const char* keys = "{ backend | 0 | Choose one of computation backends: " "0: default C++ backend, " "1: Halide language (http://halide-lang.org/), " - "2: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)}" + "2: Intel's Deep Learning Inference Engine (https://software.intel.com/openvino-toolkit)}" "{ target | 0 | Choose one of target computation devices: " "0: CPU target (by default)," "1: OpenCL }"; diff --git a/samples/dnn/object_detection.py b/samples/dnn/object_detection.py index 01386f2363..a299b558e7 100644 --- a/samples/dnn/object_detection.py +++ b/samples/dnn/object_detection.py @@ -35,7 +35,7 @@ parser.add_argument('--backend', choices=backends, default=cv.dnn.DNN_BACKEND_DE help="Choose one of computation backends: " "%d: default C++ backend, " "%d: Halide 
language (http://halide-lang.org/), " - "%d: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)" % backends) + "%d: Intel's Deep Learning Inference Engine (https://software.intel.com/openvino-toolkit)" % backends) parser.add_argument('--target', choices=targets, default=cv.dnn.DNN_TARGET_CPU, type=int, help='Choose one of target computation devices: ' '%d: CPU target (by default), ' diff --git a/samples/dnn/segmentation.cpp b/samples/dnn/segmentation.cpp index 252140a275..920e325b83 100644 --- a/samples/dnn/segmentation.cpp +++ b/samples/dnn/segmentation.cpp @@ -26,7 +26,7 @@ const char* keys = "{ backend | 0 | Choose one of computation backends: " "0: default C++ backend, " "1: Halide language (http://halide-lang.org/), " - "2: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)}" + "2: Intel's Deep Learning Inference Engine (https://software.intel.com/openvino-toolkit)}" "{ target | 0 | Choose one of target computation devices: " "0: CPU target (by default)," "1: OpenCL }"; diff --git a/samples/dnn/segmentation.py b/samples/dnn/segmentation.py index 1a3c5b4553..3649bbbe22 100644 --- a/samples/dnn/segmentation.py +++ b/samples/dnn/segmentation.py @@ -36,7 +36,7 @@ parser.add_argument('--backend', choices=backends, default=cv.dnn.DNN_BACKEND_DE help="Choose one of computation backends: " "%d: default C++ backend, " "%d: Halide language (http://halide-lang.org/), " - "%d: Intel's Deep Learning Inference Engine (https://software.seek.intel.com/deep-learning-deployment)" % backends) + "%d: Intel's Deep Learning Inference Engine (https://software.intel.com/openvino-toolkit)" % backends) parser.add_argument('--target', choices=targets, default=cv.dnn.DNN_TARGET_CPU, type=int, help='Choose one of target computation devices: ' '%d: CPU target (by default), ' From ddba77229796e0129f36bb17b2a289f75740fabf Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 
14:07:22 +0300 Subject: [PATCH 13/40] videoio: fix isOpened() method for legacy APIs, fix "index" API list --- modules/videoio/src/cap.cpp | 2 +- modules/videoio/src/videoio_registry.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/videoio/src/cap.cpp b/modules/videoio/src/cap.cpp index 693f32e6f2..c1ee87a15a 100644 --- a/modules/videoio/src/cap.cpp +++ b/modules/videoio/src/cap.cpp @@ -169,7 +169,7 @@ bool VideoCapture::isOpened() const { if (!icap.empty()) return icap->isOpened(); - return !icap.empty(); // legacy interface doesn't support closed files + return !cap.empty(); // legacy interface doesn't support closed files } void VideoCapture::release() diff --git a/modules/videoio/src/videoio_registry.cpp b/modules/videoio/src/videoio_registry.cpp index 484ebe30bc..c994c71a48 100644 --- a/modules/videoio/src/videoio_registry.cpp +++ b/modules/videoio/src/videoio_registry.cpp @@ -288,7 +288,7 @@ namespace videoio_registry { std::vector getAvailableBackends_CaptureByIndex() { - const std::vector result = VideoBackendRegistry::getInstance().getAvailableBackends_CaptureByFilename(); + const std::vector result = VideoBackendRegistry::getInstance().getAvailableBackends_CaptureByIndex(); return result; } std::vector getAvailableBackends_CaptureByFilename() From 22f435ff4d7f9df048672b4b5765ff4773e133cb Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 13:37:13 +0300 Subject: [PATCH 14/40] videoio(v4l): fix code indentation, tabs --- modules/videoio/src/cap_v4l.cpp | 1508 +++++++++++++++---------------- 1 file changed, 754 insertions(+), 754 deletions(-) diff --git a/modules/videoio/src/cap_v4l.cpp b/modules/videoio/src/cap_v4l.cpp index f8afb41196..e292b7290a 100644 --- a/modules/videoio/src/cap_v4l.cpp +++ b/modules/videoio/src/cap_v4l.cpp @@ -261,8 +261,8 @@ namespace cv { /* V4L2 structure */ struct buffer { - void * start; - size_t length; + void * start; + size_t length; }; static unsigned int n_buffers = 0; @@ 
-277,64 +277,64 @@ struct CvCaptureCAM_V4L CV_FINAL : public CvCapture char *memoryMap; IplImage frame; - __u32 palette; - int width, height; - int bufferSize; - __u32 fps; - bool convert_rgb; - bool frame_allocated; - bool returnFrame; - - /* V4L2 variables */ - buffer buffers[MAX_V4L_BUFFERS + 1]; - v4l2_capability cap; - v4l2_input inp; - v4l2_format form; - v4l2_crop crop; - v4l2_cropcap cropcap; - v4l2_requestbuffers req; - v4l2_buf_type type; - v4l2_queryctrl queryctrl; - - timeval timestamp; - - /* V4L2 control variables */ - Range focus, brightness, contrast, saturation, hue, gain, exposure; - - bool open(int _index); - bool open(const char* deviceName); - - virtual double getProperty(int) const CV_OVERRIDE; - virtual bool setProperty(int, double) CV_OVERRIDE; - virtual bool grabFrame() CV_OVERRIDE; - virtual IplImage* retrieveFrame(int) CV_OVERRIDE; - - Range getRange(int property_id) const { - switch (property_id) { - case CV_CAP_PROP_BRIGHTNESS: - return brightness; - case CV_CAP_PROP_CONTRAST: - return contrast; - case CV_CAP_PROP_SATURATION: - return saturation; - case CV_CAP_PROP_HUE: - return hue; - case CV_CAP_PROP_GAIN: - return gain; - case CV_CAP_PROP_EXPOSURE: - return exposure; - case CV_CAP_PROP_FOCUS: - return focus; - case CV_CAP_PROP_AUTOFOCUS: - return Range(0, 1); - case CV_CAP_PROP_AUTO_EXPOSURE: - return Range(0, 4); - default: - return Range(0, 255); - } - } - - virtual ~CvCaptureCAM_V4L(); + __u32 palette; + int width, height; + int bufferSize; + __u32 fps; + bool convert_rgb; + bool frame_allocated; + bool returnFrame; + + /* V4L2 variables */ + buffer buffers[MAX_V4L_BUFFERS + 1]; + v4l2_capability cap; + v4l2_input inp; + v4l2_format form; + v4l2_crop crop; + v4l2_cropcap cropcap; + v4l2_requestbuffers req; + v4l2_buf_type type; + v4l2_queryctrl queryctrl; + + timeval timestamp; + + /* V4L2 control variables */ + Range focus, brightness, contrast, saturation, hue, gain, exposure; + + bool open(int _index); + bool open(const char* 
deviceName); + + virtual double getProperty(int) const CV_OVERRIDE; + virtual bool setProperty(int, double) CV_OVERRIDE; + virtual bool grabFrame() CV_OVERRIDE; + virtual IplImage* retrieveFrame(int) CV_OVERRIDE; + + Range getRange(int property_id) const { + switch (property_id) { + case CV_CAP_PROP_BRIGHTNESS: + return brightness; + case CV_CAP_PROP_CONTRAST: + return contrast; + case CV_CAP_PROP_SATURATION: + return saturation; + case CV_CAP_PROP_HUE: + return hue; + case CV_CAP_PROP_GAIN: + return gain; + case CV_CAP_PROP_EXPOSURE: + return exposure; + case CV_CAP_PROP_FOCUS: + return focus; + case CV_CAP_PROP_AUTOFOCUS: + return Range(0, 1); + case CV_CAP_PROP_AUTO_EXPOSURE: + return Range(0, 4); + default: + return Range(0, 255); + } + } + + virtual ~CvCaptureCAM_V4L(); }; static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ); @@ -360,99 +360,99 @@ CvCaptureCAM_V4L::~CvCaptureCAM_V4L() { Returns the global numCameras with the correct value (we hope) */ static void icvInitCapture_V4L() { - int deviceHandle; - int CameraNumber; - char deviceName[MAX_DEVICE_DRIVER_NAME]; - - CameraNumber = 0; - while(CameraNumber < MAX_CAMERAS) { - /* Print the CameraNumber at the end of the string with a width of one character */ - sprintf(deviceName, "/dev/video%1d", CameraNumber); - /* Test using an open to see if this new device name really does exists. 
*/ - deviceHandle = open(deviceName, O_RDONLY); - if (deviceHandle != -1) { - /* This device does indeed exist - add it to the total so far */ - // add indexList - indexList|=(1 << CameraNumber); - numCameras++; - } - if (deviceHandle != -1) - close(deviceHandle); - /* Set up to test the next /dev/video source in line */ - CameraNumber++; - } /* End while */ + int deviceHandle; + int CameraNumber; + char deviceName[MAX_DEVICE_DRIVER_NAME]; + + CameraNumber = 0; + while(CameraNumber < MAX_CAMERAS) { + /* Print the CameraNumber at the end of the string with a width of one character */ + sprintf(deviceName, "/dev/video%1d", CameraNumber); + /* Test using an open to see if this new device name really does exists. */ + deviceHandle = open(deviceName, O_RDONLY); + if (deviceHandle != -1) { + /* This device does indeed exist - add it to the total so far */ + // add indexList + indexList|=(1 << CameraNumber); + numCameras++; + } + if (deviceHandle != -1) + close(deviceHandle); + /* Set up to test the next /dev/video source in line */ + CameraNumber++; + } /* End while */ }; /* End icvInitCapture_V4L */ static bool try_palette_v4l2(CvCaptureCAM_V4L* capture) { - capture->form = v4l2_format(); - capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - capture->form.fmt.pix.pixelformat = capture->palette; - capture->form.fmt.pix.field = V4L2_FIELD_ANY; - capture->form.fmt.pix.width = capture->width; - capture->form.fmt.pix.height = capture->height; - - if (-1 == ioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form)) - return false; + capture->form = v4l2_format(); + capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + capture->form.fmt.pix.pixelformat = capture->palette; + capture->form.fmt.pix.field = V4L2_FIELD_ANY; + capture->form.fmt.pix.width = capture->width; + capture->form.fmt.pix.height = capture->height; + + if (-1 == ioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form)) + return false; - return capture->palette == capture->form.fmt.pix.pixelformat; + return 
capture->palette == capture->form.fmt.pix.pixelformat; } static int try_init_v4l2(CvCaptureCAM_V4L* capture, const char *deviceName) { - // Test device for V4L2 compatibility - // Return value: - // -1 then unable to open device - // 0 then detected nothing - // 1 then V4L2 device - - int deviceIndex; - - /* Open and test V4L2 device */ - capture->deviceHandle = open (deviceName, O_RDWR /* required */ | O_NONBLOCK, 0); - if (-1 == capture->deviceHandle) - { + // Test device for V4L2 compatibility + // Return value: + // -1 then unable to open device + // 0 then detected nothing + // 1 then V4L2 device + + int deviceIndex; + + /* Open and test V4L2 device */ + capture->deviceHandle = open (deviceName, O_RDWR /* required */ | O_NONBLOCK, 0); + if (-1 == capture->deviceHandle) + { #ifndef NDEBUG - fprintf(stderr, "(DEBUG) try_init_v4l2 open \"%s\": %s\n", deviceName, strerror(errno)); + fprintf(stderr, "(DEBUG) try_init_v4l2 open \"%s\": %s\n", deviceName, strerror(errno)); #endif - icvCloseCAM_V4L(capture); - return -1; - } + icvCloseCAM_V4L(capture); + return -1; + } - capture->cap = v4l2_capability(); - if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap)) - { + capture->cap = v4l2_capability(); + if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap)) + { #ifndef NDEBUG - fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_QUERYCAP \"%s\": %s\n", deviceName, strerror(errno)); + fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_QUERYCAP \"%s\": %s\n", deviceName, strerror(errno)); #endif - icvCloseCAM_V4L(capture); - return 0; - } + icvCloseCAM_V4L(capture); + return 0; + } - /* Query channels number */ - if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_INPUT, &deviceIndex)) - { + /* Query channels number */ + if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_INPUT, &deviceIndex)) + { #ifndef NDEBUG - fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_G_INPUT \"%s\": %s\n", deviceName, strerror(errno)); + fprintf(stderr, "(DEBUG) try_init_v4l2 
VIDIOC_G_INPUT \"%s\": %s\n", deviceName, strerror(errno)); #endif - icvCloseCAM_V4L(capture); - return 0; - } + icvCloseCAM_V4L(capture); + return 0; + } - /* Query information about current input */ - capture->inp = v4l2_input(); - capture->inp.index = deviceIndex; - if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp)) - { + /* Query information about current input */ + capture->inp = v4l2_input(); + capture->inp.index = deviceIndex; + if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp)) + { #ifndef NDEBUG - fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_ENUMINPUT \"%s\": %s\n", deviceName, strerror(errno)); + fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_ENUMINPUT \"%s\": %s\n", deviceName, strerror(errno)); #endif - icvCloseCAM_V4L(capture); - return 0; - } + icvCloseCAM_V4L(capture); + return 0; + } - return 1; + return 1; } @@ -536,22 +536,22 @@ static void v4l2_control_range(CvCaptureCAM_V4L* cap, __u32 id) static void v4l2_scan_controls(CvCaptureCAM_V4L* capture) { - __u32 ctrl_id; + __u32 ctrl_id; - for (ctrl_id = V4L2_CID_BASE; ctrl_id < V4L2_CID_LASTP1; ctrl_id++) - { - v4l2_control_range(capture, ctrl_id); - } + for (ctrl_id = V4L2_CID_BASE; ctrl_id < V4L2_CID_LASTP1; ctrl_id++) + { + v4l2_control_range(capture, ctrl_id); + } - for (ctrl_id = V4L2_CID_PRIVATE_BASE;;ctrl_id++) - { - v4l2_control_range(capture, ctrl_id); + for (ctrl_id = V4L2_CID_PRIVATE_BASE;;ctrl_id++) + { + v4l2_control_range(capture, ctrl_id); - if (errno == EINVAL) - break; - } + if (errno == EINVAL) + break; + } - v4l2_control_range(capture, V4L2_CID_FOCUS_ABSOLUTE); + v4l2_control_range(capture, V4L2_CID_FOCUS_ABSOLUTE); } static int v4l2_set_fps(CvCaptureCAM_V4L* capture) { @@ -619,156 +619,156 @@ static void v4l2_create_frame(CvCaptureCAM_V4L *capture) { static int _capture_V4L2 (CvCaptureCAM_V4L *capture) { - const char* deviceName = capture->deviceName.c_str(); - if (try_init_v4l2(capture, deviceName) != 1) { - /* init of the v4l2 device 
is not OK */ - return -1; - } - - /* V4L2 control variables are zero (memset above) */ - - /* Scan V4L2 controls */ - v4l2_scan_controls(capture); - - if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { - /* Nope. */ - fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName); - icvCloseCAM_V4L(capture); - return -1; - } - - /* The following code sets the CHANNEL_NUMBER of the video input. Some video sources - have sub "Channel Numbers". For a typical V4L TV capture card, this is usually 1. - I myself am using a simple NTSC video input capture card that uses the value of 1. - If you are not in North America or have a different video standard, you WILL have to change - the following settings and recompile/reinstall. This set of settings is based on - the most commonly encountered input video source types (like my bttv card) */ - - if(capture->inp.index > 0) { - capture->inp = v4l2_input(); - capture->inp.index = CHANNEL_NUMBER; - /* Set only channel number to CHANNEL_NUMBER */ - /* V4L2 have a status field from selected video mode */ - if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp)) - { - fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n"); - icvCloseCAM_V4L (capture); - return -1; - } - } /* End if */ - - /* Find Window info */ - capture->form = v4l2_format(); - capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) { - fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n"); - icvCloseCAM_V4L(capture); - return -1; - } - - if (autosetup_capture_mode_v4l2(capture) == -1) - return -1; - - /* try to set framerate */ - v4l2_set_fps(capture); - - unsigned int min; - - /* Buggy driver paranoia. 
*/ - min = capture->form.fmt.pix.width * 2; - - if (capture->form.fmt.pix.bytesperline < min) - capture->form.fmt.pix.bytesperline = min; - - min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height; - - if (capture->form.fmt.pix.sizeimage < min) - capture->form.fmt.pix.sizeimage = min; - - capture->req = v4l2_requestbuffers(); - - unsigned int buffer_number = capture->bufferSize; - - try_again: - - capture->req.count = buffer_number; - capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - capture->req.memory = V4L2_MEMORY_MMAP; - - if (-1 == ioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req)) - { - if (EINVAL == errno) - { - fprintf (stderr, "%s does not support memory mapping\n", deviceName); - } else { - perror ("VIDIOC_REQBUFS"); - } - /* free capture, and returns an error code */ - icvCloseCAM_V4L (capture); - return -1; - } - - if (capture->req.count < buffer_number) - { - if (buffer_number == 1) - { - fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName); - - /* free capture, and returns an error code */ - icvCloseCAM_V4L (capture); - return -1; - } else { - buffer_number--; - fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName); - - goto try_again; - } - } - - for (n_buffers = 0; n_buffers < capture->req.count; ++n_buffers) - { - v4l2_buffer buf = v4l2_buffer(); - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - buf.index = n_buffers; - - if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) { - perror ("VIDIOC_QUERYBUF"); - - /* free capture, and returns an error code */ - icvCloseCAM_V4L (capture); - return -1; - } - - capture->buffers[n_buffers].length = buf.length; - capture->buffers[n_buffers].start = - mmap (NULL /* start anywhere */, - buf.length, - PROT_READ | PROT_WRITE /* required */, - MAP_SHARED /* recommended */, - capture->deviceHandle, buf.m.offset); - - if (MAP_FAILED == capture->buffers[n_buffers].start) { - perror ("mmap"); - - /* 
free capture, and returns an error code */ - icvCloseCAM_V4L (capture); - return -1; - } - - if (n_buffers == 0) { - capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length ); - capture->buffers[MAX_V4L_BUFFERS].length = buf.length; - } - } - - v4l2_create_frame(capture); - - // reinitialize buffers - capture->FirstCapture = 1; - - return 1; + const char* deviceName = capture->deviceName.c_str(); + if (try_init_v4l2(capture, deviceName) != 1) { + /* init of the v4l2 device is not OK */ + return -1; + } + + /* V4L2 control variables are zero (memset above) */ + + /* Scan V4L2 controls */ + v4l2_scan_controls(capture); + + if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { + /* Nope. */ + fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName); + icvCloseCAM_V4L(capture); + return -1; + } + + /* The following code sets the CHANNEL_NUMBER of the video input. Some video sources + have sub "Channel Numbers". For a typical V4L TV capture card, this is usually 1. + I myself am using a simple NTSC video input capture card that uses the value of 1. + If you are not in North America or have a different video standard, you WILL have to change + the following settings and recompile/reinstall. 
This set of settings is based on + the most commonly encountered input video source types (like my bttv card) */ + + if(capture->inp.index > 0) { + capture->inp = v4l2_input(); + capture->inp.index = CHANNEL_NUMBER; + /* Set only channel number to CHANNEL_NUMBER */ + /* V4L2 have a status field from selected video mode */ + if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp)) + { + fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n"); + icvCloseCAM_V4L (capture); + return -1; + } + } /* End if */ + + /* Find Window info */ + capture->form = v4l2_format(); + capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) { + fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n"); + icvCloseCAM_V4L(capture); + return -1; + } + + if (autosetup_capture_mode_v4l2(capture) == -1) + return -1; + + /* try to set framerate */ + v4l2_set_fps(capture); + + unsigned int min; + + /* Buggy driver paranoia. 
*/ + min = capture->form.fmt.pix.width * 2; + + if (capture->form.fmt.pix.bytesperline < min) + capture->form.fmt.pix.bytesperline = min; + + min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height; + + if (capture->form.fmt.pix.sizeimage < min) + capture->form.fmt.pix.sizeimage = min; + + capture->req = v4l2_requestbuffers(); + + unsigned int buffer_number = capture->bufferSize; + +try_again: + + capture->req.count = buffer_number; + capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + capture->req.memory = V4L2_MEMORY_MMAP; + + if (-1 == ioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req)) + { + if (EINVAL == errno) + { + fprintf (stderr, "%s does not support memory mapping\n", deviceName); + } else { + perror ("VIDIOC_REQBUFS"); + } + /* free capture, and returns an error code */ + icvCloseCAM_V4L (capture); + return -1; + } + + if (capture->req.count < buffer_number) + { + if (buffer_number == 1) + { + fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName); + + /* free capture, and returns an error code */ + icvCloseCAM_V4L (capture); + return -1; + } else { + buffer_number--; + fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName); + + goto try_again; + } + } + + for (n_buffers = 0; n_buffers < capture->req.count; ++n_buffers) + { + v4l2_buffer buf = v4l2_buffer(); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = n_buffers; + + if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) { + perror ("VIDIOC_QUERYBUF"); + + /* free capture, and returns an error code */ + icvCloseCAM_V4L (capture); + return -1; + } + + capture->buffers[n_buffers].length = buf.length; + capture->buffers[n_buffers].start = + mmap (NULL /* start anywhere */, + buf.length, + PROT_READ | PROT_WRITE /* required */, + MAP_SHARED /* recommended */, + capture->deviceHandle, buf.m.offset); + + if (MAP_FAILED == capture->buffers[n_buffers].start) { + perror ("mmap"); + + /* 
free capture, and returns an error code */ + icvCloseCAM_V4L (capture); + return -1; + } + + if (n_buffers == 0) { + capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length ); + capture->buffers[MAX_V4L_BUFFERS].length = buf.length; + } + } + + v4l2_create_frame(capture); + + // reinitialize buffers + capture->FirstCapture = 1; + + return 1; }; /* End _capture_V4L2 */ /** @@ -785,35 +785,35 @@ static bool v4l2_reset( CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::open(int _index) { - int autoindex = 0; - char _deviceName[MAX_DEVICE_DRIVER_NAME]; - - if (!numCameras) - icvInitCapture_V4L(); /* Haven't called icvInitCapture yet - do it now! */ - if (!numCameras) - return false; /* Are there any /dev/video input sources? */ - - //search index in indexList - if ( (_index>-1) && ! ((1 << _index) & indexList) ) - { - fprintf( stderr, "VIDEOIO ERROR: V4L: index %d is not correct!\n",_index); - return false; /* Did someone ask for not correct video source number? */ - } - - /* Select camera, or rather, V4L video source */ - if (_index<0) { // Asking for the first device available - for (; autoindex-1) && ! ((1 << _index) & indexList) ) + { + fprintf( stderr, "VIDEOIO ERROR: V4L: index %d is not correct!\n",_index); + return false; /* Did someone ask for not correct video source number? 
*/ + } + + /* Select camera, or rather, V4L video source */ + if (_index<0) { // Asking for the first device available + for (; autoindexdeviceHandle, VIDIOC_QBUF, &buf) == -1) - { + if (!(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) + { + if (ioctl(capture->deviceHandle, VIDIOC_QBUF, &buf) == -1) + { + return 0; + } + } return 0; - } - } - return 0; default: /* display the error and stop processing */ @@ -857,24 +857,24 @@ static int read_frame_v4l2(CvCaptureCAM_V4L* capture) { perror ("VIDIOC_DQBUF"); return -1; } - } + } - assert(buf.index < capture->req.count); + assert(buf.index < capture->req.count); - memcpy(capture->buffers[MAX_V4L_BUFFERS].start, - capture->buffers[buf.index].start, - capture->buffers[MAX_V4L_BUFFERS].length ); - capture->bufferIndex = MAX_V4L_BUFFERS; - //printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n", - // buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused); + memcpy(capture->buffers[MAX_V4L_BUFFERS].start, + capture->buffers[buf.index].start, + capture->buffers[MAX_V4L_BUFFERS].length ); + capture->bufferIndex = MAX_V4L_BUFFERS; + //printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n", + // buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused); - //set timestamp in capture struct to be timestamp of most recent frame - capture->timestamp = buf.timestamp; + //set timestamp in capture struct to be timestamp of most recent frame + capture->timestamp = buf.timestamp; - if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) - perror ("VIDIOC_QBUF"); + if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) + perror ("VIDIOC_QBUF"); - return 1; + return 1; } static int mainloop_v4l2(CvCaptureCAM_V4L* capture) { @@ -922,55 +922,55 @@ static int mainloop_v4l2(CvCaptureCAM_V4L* capture) { } static bool icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) { - if (capture->FirstCapture) { - /* Some general initialization must take place the first time through */ + if 
(capture->FirstCapture) { + /* Some general initialization must take place the first time through */ - /* This is just a technicality, but all buffers must be filled up before any + /* This is just a technicality, but all buffers must be filled up before any staggered SYNC is applied. SO, filler up. (see V4L HowTo) */ - { - - for (capture->bufferIndex = 0; - capture->bufferIndex < ((int)capture->req.count); - ++capture->bufferIndex) { - v4l2_buffer buf = v4l2_buffer(); + for (capture->bufferIndex = 0; + capture->bufferIndex < ((int)capture->req.count); + ++capture->bufferIndex) + { - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buf.memory = V4L2_MEMORY_MMAP; - buf.index = (unsigned long)capture->bufferIndex; + v4l2_buffer buf = v4l2_buffer(); - if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) { - perror ("VIDIOC_QBUF"); - return false; - } - } + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = (unsigned long)capture->bufferIndex; - /* enable the streaming */ - capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == ioctl (capture->deviceHandle, VIDIOC_STREAMON, - &capture->type)) { - /* error enabling the stream */ - perror ("VIDIOC_STREAMON"); - return false; + if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) { + perror ("VIDIOC_QBUF"); + return false; + } + } + + /* enable the streaming */ + capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == ioctl (capture->deviceHandle, VIDIOC_STREAMON, + &capture->type)) { + /* error enabling the stream */ + perror ("VIDIOC_STREAMON"); + return false; + } } - } #if defined(V4L_ABORT_BADJPEG) // skip first frame. 
it is often bad -- this is unnotied in traditional apps, // but could be fatal if bad jpeg is enabled if(mainloop_v4l2(capture) != 1) - return false; + return false; #endif - /* preparation is ok */ - capture->FirstCapture = 0; - } + /* preparation is ok */ + capture->FirstCapture = 0; + } - if(mainloop_v4l2(capture) != 1) return false; + if(mainloop_v4l2(capture) != 1) return false; - return true; + return true; } /* @@ -1004,7 +1004,7 @@ static bool icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) { static inline void move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v, - int /*rowPixels*/, unsigned char * rgb) + int /*rowPixels*/, unsigned char * rgb) { const int rvScale = 91881; const int guScale = -22553; @@ -1014,13 +1014,13 @@ move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v, int r, g, b; g = guScale * u + gvScale * v; -// if (force_rgb) { -// r = buScale * u; -// b = rvScale * v; -// } else { - r = rvScale * v; - b = buScale * u; -// } + // if (force_rgb) { + // r = buScale * u; + // b = rvScale * v; + // } else { + r = rvScale * v; + b = buScale * u; + // } yTL *= yScale; yTR *= yScale; yBL *= yScale; yBR *= yScale; @@ -1046,7 +1046,7 @@ static inline void yuv420p_to_rgb24(int width, int height, uchar* src, uchar* dst) { cvtColor(Mat(height * 3 / 2, width, CV_8U, src), Mat(height, width, CV_8UC3, dst), - COLOR_YUV2BGR_YV12); + COLOR_YUV2BGR_YV12); } // Consider a YUV411P image of 8x2 pixels. @@ -1066,7 +1066,7 @@ yuv420p_to_rgb24(int width, int height, uchar* src, uchar* dst) /* [FD] untested... 
*/ static void yuv411p_to_rgb24(int width, int height, - unsigned char *pIn0, unsigned char *pOut0) + unsigned char *pIn0, unsigned char *pOut0) { const int numpix = width * height; const int bytes = 24 >> 3; @@ -1086,7 +1086,7 @@ yuv411p_to_rgb24(int width, int height, v = (*pV++) - 128; move_411_block(y00, y01, y10, y11, u, v, - width, pOut); + width, pOut); pY += 4; pOut += 4 * bytes; @@ -1099,14 +1099,14 @@ yuv411p_to_rgb24(int width, int height, static void yuyv_to_rgb24(int width, int height, unsigned char* src, unsigned char* dst) { cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst), - COLOR_YUV2BGR_YUYV); + COLOR_YUV2BGR_YUYV); } static inline void uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst) { cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst), - COLOR_YUV2BGR_UYVY); + COLOR_YUV2BGR_UYVY); } static inline void @@ -1147,64 +1147,64 @@ static void bayer2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, uns size = WIDTH*HEIGHT; for ( i = 0; i < size; i++ ) { - if ( (i/WIDTH) % 2 == 0 ) { - if ( (i % 2) == 0 ) { - /* B */ - if ( (i > WIDTH) && ((i % WIDTH) > 0) ) { - *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+ - *(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; /* R */ - *scanpt++ = (*(rawpt-1)+*(rawpt+1)+ - *(rawpt+WIDTH)+*(rawpt-WIDTH))/4; /* G */ - *scanpt++ = *rawpt; /* B */ - } else { - /* first line or left column */ - *scanpt++ = *(rawpt+WIDTH+1); /* R */ - *scanpt++ = (*(rawpt+1)+*(rawpt+WIDTH))/2; /* G */ - *scanpt++ = *rawpt; /* B */ - } - } else { - /* (B)G */ - if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) ) { - *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; /* R */ - *scanpt++ = *rawpt; /* G */ - *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; /* B */ - } else { - /* first line or right column */ - *scanpt++ = *(rawpt+WIDTH); /* R */ - *scanpt++ = *rawpt; /* G */ - *scanpt++ = *(rawpt-1); /* B */ - } - } - } else { - if ( (i % 2) == 0 ) { - /* G(R) */ - if ( (i < 
(WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) ) { - *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; /* R */ - *scanpt++ = *rawpt; /* G */ - *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; /* B */ - } else { - /* bottom line or left column */ - *scanpt++ = *(rawpt+1); /* R */ - *scanpt++ = *rawpt; /* G */ - *scanpt++ = *(rawpt-WIDTH); /* B */ - } - } else { - /* R */ - if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) ) { - *scanpt++ = *rawpt; /* R */ - *scanpt++ = (*(rawpt-1)+*(rawpt+1)+ - *(rawpt-WIDTH)+*(rawpt+WIDTH))/4; /* G */ - *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+ - *(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; /* B */ - } else { - /* bottom line or right column */ - *scanpt++ = *rawpt; /* R */ - *scanpt++ = (*(rawpt-1)+*(rawpt-WIDTH))/2; /* G */ - *scanpt++ = *(rawpt-WIDTH-1); /* B */ - } - } - } - rawpt++; + if ( (i/WIDTH) % 2 == 0 ) { + if ( (i % 2) == 0 ) { + /* B */ + if ( (i > WIDTH) && ((i % WIDTH) > 0) ) { + *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+ + *(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; /* R */ + *scanpt++ = (*(rawpt-1)+*(rawpt+1)+ + *(rawpt+WIDTH)+*(rawpt-WIDTH))/4; /* G */ + *scanpt++ = *rawpt; /* B */ + } else { + /* first line or left column */ + *scanpt++ = *(rawpt+WIDTH+1); /* R */ + *scanpt++ = (*(rawpt+1)+*(rawpt+WIDTH))/2; /* G */ + *scanpt++ = *rawpt; /* B */ + } + } else { + /* (B)G */ + if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) ) { + *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; /* R */ + *scanpt++ = *rawpt; /* G */ + *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; /* B */ + } else { + /* first line or right column */ + *scanpt++ = *(rawpt+WIDTH); /* R */ + *scanpt++ = *rawpt; /* G */ + *scanpt++ = *(rawpt-1); /* B */ + } + } + } else { + if ( (i % 2) == 0 ) { + /* G(R) */ + if ( (i < (WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) ) { + *scanpt++ = (*(rawpt-1)+*(rawpt+1))/2; /* R */ + *scanpt++ = *rawpt; /* G */ + *scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2; /* B */ + } else { + /* bottom line or left column */ + *scanpt++ = *(rawpt+1); /* R */ 
+ *scanpt++ = *rawpt; /* G */ + *scanpt++ = *(rawpt-WIDTH); /* B */ + } + } else { + /* R */ + if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) ) { + *scanpt++ = *rawpt; /* R */ + *scanpt++ = (*(rawpt-1)+*(rawpt+1)+ + *(rawpt-WIDTH)+*(rawpt+WIDTH))/4; /* G */ + *scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+ + *(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4; /* B */ + } else { + /* bottom line or right column */ + *scanpt++ = *rawpt; /* R */ + *scanpt++ = (*(rawpt-1)+*(rawpt-WIDTH))/2; /* G */ + *scanpt++ = *(rawpt-WIDTH-1); /* B */ + } + } + } + rawpt++; } } @@ -1237,11 +1237,11 @@ static void sgbrg2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, uns *scanpt++ = (*(rawpt-WIDTH) + *(rawpt+WIDTH))/2; /* B */ } else { - /* first line or left column */ + /* first line or left column */ - *scanpt++ = *(rawpt+1); /* R */ - *scanpt++ = *(rawpt); /* G */ - *scanpt++ = *(rawpt+WIDTH); /* B */ + *scanpt++ = *(rawpt+1); /* R */ + *scanpt++ = *(rawpt); /* G */ + *scanpt++ = *(rawpt+WIDTH); /* B */ } } else //odd pixel { @@ -1306,9 +1306,9 @@ rgb24_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst) #define CLAMP(x) ((x)<0?0:((x)>255)?255:(x)) typedef struct { - int is_abs; - int len; - int val; + int is_abs; + int len; + int val; } code_table_t; @@ -1325,68 +1325,68 @@ static int init_done = 0; Each entry at index x in the table represents the codeword present at the MSB of byte x. 
-*/ + */ static void sonix_decompress_init(void) { - int i; - int is_abs, val, len; - - for (i = 0; i < 256; i++) { - is_abs = 0; - val = 0; - len = 0; - if ((i & 0x80) == 0) { - /* code 0 */ - val = 0; - len = 1; - } - else if ((i & 0xE0) == 0x80) { - /* code 100 */ - val = +4; - len = 3; - } - else if ((i & 0xE0) == 0xA0) { - /* code 101 */ - val = -4; - len = 3; - } - else if ((i & 0xF0) == 0xD0) { - /* code 1101 */ - val = +11; - len = 4; - } - else if ((i & 0xF0) == 0xF0) { - /* code 1111 */ - val = -11; - len = 4; - } - else if ((i & 0xF8) == 0xC8) { - /* code 11001 */ - val = +20; - len = 5; - } - else if ((i & 0xFC) == 0xC0) { - /* code 110000 */ - val = -20; - len = 6; - } - else if ((i & 0xFC) == 0xC4) { - /* code 110001xx: unknown */ - val = 0; - len = 8; - } - else if ((i & 0xF0) == 0xE0) { - /* code 1110xxxx */ - is_abs = 1; - val = (i & 0x0F) << 4; - len = 8; + int i; + int is_abs, val, len; + + for (i = 0; i < 256; i++) { + is_abs = 0; + val = 0; + len = 0; + if ((i & 0x80) == 0) { + /* code 0 */ + val = 0; + len = 1; + } + else if ((i & 0xE0) == 0x80) { + /* code 100 */ + val = +4; + len = 3; + } + else if ((i & 0xE0) == 0xA0) { + /* code 101 */ + val = -4; + len = 3; + } + else if ((i & 0xF0) == 0xD0) { + /* code 1101 */ + val = +11; + len = 4; + } + else if ((i & 0xF0) == 0xF0) { + /* code 1111 */ + val = -11; + len = 4; + } + else if ((i & 0xF8) == 0xC8) { + /* code 11001 */ + val = +20; + len = 5; + } + else if ((i & 0xFC) == 0xC0) { + /* code 110000 */ + val = -20; + len = 6; + } + else if ((i & 0xFC) == 0xC4) { + /* code 110001xx: unknown */ + val = 0; + len = 8; + } + else if ((i & 0xF0) == 0xE0) { + /* code 1110xxxx */ + is_abs = 1; + val = (i & 0x0F) << 4; + len = 8; + } + table[i].is_abs = is_abs; + table[i].val = val; + table[i].len = len; } - table[i].is_abs = is_abs; - table[i].val = val; - table[i].len = len; - } - init_done = 1; + init_done = 1; } @@ -1403,75 +1403,75 @@ static void sonix_decompress_init(void) Returns 0 if the 
operation was successful. Returns <0 if operation failed. -*/ + */ static int sonix_decompress(int width, int height, unsigned char *inp, unsigned char *outp) { - int row, col; - int val; - int bitpos; - unsigned char code; - unsigned char *addr; - - if (!init_done) { - /* do sonix_decompress_init first! */ - return -1; - } - - bitpos = 0; - for (row = 0; row < height; row++) { + int row, col; + int val; + int bitpos; + unsigned char code; + unsigned char *addr; + + if (!init_done) { + /* do sonix_decompress_init first! */ + return -1; + } - col = 0; + bitpos = 0; + for (row = 0; row < height; row++) { + col = 0; - /* first two pixels in first two rows are stored as raw 8-bit */ - if (row < 2) { - addr = inp + (bitpos >> 3); - code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); - bitpos += 8; - *outp++ = code; - addr = inp + (bitpos >> 3); - code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); - bitpos += 8; - *outp++ = code; + /* first two pixels in first two rows are stored as raw 8-bit */ + if (row < 2) { + addr = inp + (bitpos >> 3); + code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); + bitpos += 8; + *outp++ = code; - col += 2; - } + addr = inp + (bitpos >> 3); + code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); + bitpos += 8; + *outp++ = code; - while (col < width) { - /* get bitcode from bitstream */ - addr = inp + (bitpos >> 3); - code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); - - /* update bit position */ - bitpos += table[code].len; - - /* calculate pixel value */ - val = table[code].val; - if (!table[code].is_abs) { - /* value is relative to top and left pixel */ - if (col < 2) { - /* left column: relative to top pixel */ - val += outp[-2*width]; + col += 2; } - else if (row < 2) { - /* top row: relative to left pixel */ - val += outp[-2]; - } - else { - /* main area: average of left pixel and top pixel */ - val += (outp[-2] + outp[-2*width]) / 2; - } - } - /* store 
pixel */ - *outp++ = CLAMP(val); - col++; + while (col < width) { + /* get bitcode from bitstream */ + addr = inp + (bitpos >> 3); + code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7))); + + /* update bit position */ + bitpos += table[code].len; + + /* calculate pixel value */ + val = table[code].val; + if (!table[code].is_abs) { + /* value is relative to top and left pixel */ + if (col < 2) { + /* left column: relative to top pixel */ + val += outp[-2*width]; + } + else if (row < 2) { + /* top row: relative to left pixel */ + val += outp[-2]; + } + else { + /* main area: average of left pixel and top pixel */ + val += (outp[-2] + outp[-2*width]) / 2; + } + } + + /* store pixel */ + *outp++ = CLAMP(val); + col++; + } } - } - return 0; + return 0; } static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) { @@ -1500,47 +1500,47 @@ static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) { { case V4L2_PIX_FMT_BGR24: memcpy((char *)capture->frame.imageData, - (char *)capture->buffers[capture->bufferIndex].start, - capture->frame.imageSize); + (char *)capture->buffers[capture->bufferIndex].start, + capture->frame.imageSize); break; case V4L2_PIX_FMT_YVU420: yuv420p_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)(capture->buffers[capture->bufferIndex].start), - (unsigned char*)capture->frame.imageData); + capture->form.fmt.pix.height, + (unsigned char*)(capture->buffers[capture->bufferIndex].start), + (unsigned char*)capture->frame.imageData); break; case V4L2_PIX_FMT_YUV411P: yuv411p_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)(capture->buffers[capture->bufferIndex].start), - (unsigned char*)capture->frame.imageData); + capture->form.fmt.pix.height, + (unsigned char*)(capture->buffers[capture->bufferIndex].start), + (unsigned char*)capture->frame.imageData); break; #ifdef HAVE_JPEG case V4L2_PIX_FMT_MJPEG: case V4L2_PIX_FMT_JPEG: if 
(!mjpeg_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)(capture->buffers[capture->bufferIndex] - .start), - capture->buffers[capture->bufferIndex].length, - &capture->frame)) - return 0; + capture->form.fmt.pix.height, + (unsigned char*)(capture->buffers[capture->bufferIndex] + .start), + capture->buffers[capture->bufferIndex].length, + &capture->frame)) + return 0; break; #endif case V4L2_PIX_FMT_YUYV: yuyv_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)(capture->buffers[capture->bufferIndex].start), - (unsigned char*)capture->frame.imageData); + capture->form.fmt.pix.height, + (unsigned char*)(capture->buffers[capture->bufferIndex].start), + (unsigned char*)capture->frame.imageData); break; case V4L2_PIX_FMT_UYVY: uyvy_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)(capture->buffers[capture->bufferIndex].start), - (unsigned char*)capture->frame.imageData); + capture->form.fmt.pix.height, + (unsigned char*)(capture->buffers[capture->bufferIndex].start), + (unsigned char*)capture->frame.imageData); break; case V4L2_PIX_FMT_SBGGR8: bayer2rgb24(capture->form.fmt.pix.width, @@ -1552,9 +1552,9 @@ static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) { case V4L2_PIX_FMT_SN9C10X: sonix_decompress_init(); sonix_decompress(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)capture->buffers[capture->bufferIndex].start, - (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start); + capture->form.fmt.pix.height, + (unsigned char*)capture->buffers[capture->bufferIndex].start, + (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start); bayer2rgb24(capture->form.fmt.pix.width, capture->form.fmt.pix.height, @@ -1577,13 +1577,13 @@ static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) { case V4L2_PIX_FMT_Y16: if(capture->convert_rgb){ 
y16_to_rgb24(capture->form.fmt.pix.width, - capture->form.fmt.pix.height, - (unsigned char*)capture->buffers[capture->bufferIndex].start, - (unsigned char*)capture->frame.imageData); + capture->form.fmt.pix.height, + (unsigned char*)capture->buffers[capture->bufferIndex].start, + (unsigned char*)capture->frame.imageData); }else{ memcpy((char *)capture->frame.imageData, - (char *)capture->buffers[capture->bufferIndex].start, - capture->frame.imageSize); + (char *)capture->buffers[capture->bufferIndex].start, + capture->frame.imageSize); } break; } @@ -1620,121 +1620,121 @@ static inline __u32 capPropertyToV4L2(int prop) { } static double icvGetPropertyCAM_V4L (const CvCaptureCAM_V4L* capture, - int property_id ) { - { - v4l2_format form; - memset(&form, 0, sizeof(v4l2_format)); - form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &form)) { - /* display an error message, and return an error code */ - perror ("VIDIOC_G_FMT"); - return -1; - } - - switch (property_id) { - case CV_CAP_PROP_FRAME_WIDTH: - return form.fmt.pix.width; - case CV_CAP_PROP_FRAME_HEIGHT: - return form.fmt.pix.height; - case CV_CAP_PROP_FOURCC: - case CV_CAP_PROP_MODE: - return capture->palette; - case CV_CAP_PROP_FORMAT: - return CV_MAKETYPE(IPL2CV_DEPTH(capture->frame.depth), capture->frame.nChannels); - case CV_CAP_PROP_CONVERT_RGB: - return capture->convert_rgb; - case CV_CAP_PROP_BUFFERSIZE: - return capture->bufferSize; - } - - if(property_id == CV_CAP_PROP_FPS) { - v4l2_streamparm sp = v4l2_streamparm(); - sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (ioctl(capture->deviceHandle, VIDIOC_G_PARM, &sp) < 0){ - fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n"); - return -1; - } - - return sp.parm.capture.timeperframe.denominator / (double)sp.parm.capture.timeperframe.numerator; - } - - /* initialize the control structure */ - - if(property_id == CV_CAP_PROP_POS_MSEC) { - if (capture->FirstCapture) { - return 0; - } else { - return 
1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000; - } - } - - __u32 v4l2id = capPropertyToV4L2(property_id); - - if(v4l2id == __u32(-1)) { - fprintf(stderr, - "VIDEOIO ERROR: V4L2: getting property #%d is not supported\n", - property_id); - return -1; - } - - v4l2_control control = {v4l2id, 0}; - - if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_CTRL, - &control)) { - - fprintf( stderr, "VIDEOIO ERROR: V4L2: "); - switch (property_id) { - case CV_CAP_PROP_BRIGHTNESS: - fprintf (stderr, "Brightness"); - break; - case CV_CAP_PROP_CONTRAST: - fprintf (stderr, "Contrast"); - break; - case CV_CAP_PROP_SATURATION: - fprintf (stderr, "Saturation"); - break; - case CV_CAP_PROP_HUE: - fprintf (stderr, "Hue"); - break; - case CV_CAP_PROP_GAIN: - fprintf (stderr, "Gain"); - break; - case CV_CAP_PROP_AUTO_EXPOSURE: - fprintf (stderr, "Auto Exposure"); - break; - case CV_CAP_PROP_EXPOSURE: - fprintf (stderr, "Exposure"); - break; - case CV_CAP_PROP_AUTOFOCUS: - fprintf (stderr, "Autofocus"); - break; - case CV_CAP_PROP_FOCUS: - fprintf (stderr, "Focus"); - break; - } - fprintf (stderr, " is not supported by your device\n"); - - return -1; - } - - /* get the min/max values */ - Range range = capture->getRange(property_id); - - /* all was OK, so convert to 0.0 - 1.0 range, and return the value */ - return ((double)control.value - range.start) / range.size(); - - } + int property_id ) { + { + v4l2_format form; + memset(&form, 0, sizeof(v4l2_format)); + form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &form)) { + /* display an error message, and return an error code */ + perror ("VIDIOC_G_FMT"); + return -1; + } + + switch (property_id) { + case CV_CAP_PROP_FRAME_WIDTH: + return form.fmt.pix.width; + case CV_CAP_PROP_FRAME_HEIGHT: + return form.fmt.pix.height; + case CV_CAP_PROP_FOURCC: + case CV_CAP_PROP_MODE: + return capture->palette; + case CV_CAP_PROP_FORMAT: + return 
CV_MAKETYPE(IPL2CV_DEPTH(capture->frame.depth), capture->frame.nChannels); + case CV_CAP_PROP_CONVERT_RGB: + return capture->convert_rgb; + case CV_CAP_PROP_BUFFERSIZE: + return capture->bufferSize; + } + + if(property_id == CV_CAP_PROP_FPS) { + v4l2_streamparm sp = v4l2_streamparm(); + sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl(capture->deviceHandle, VIDIOC_G_PARM, &sp) < 0){ + fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n"); + return -1; + } + + return sp.parm.capture.timeperframe.denominator / (double)sp.parm.capture.timeperframe.numerator; + } + + /* initialize the control structure */ + + if(property_id == CV_CAP_PROP_POS_MSEC) { + if (capture->FirstCapture) { + return 0; + } else { + return 1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000; + } + } + + __u32 v4l2id = capPropertyToV4L2(property_id); + + if(v4l2id == __u32(-1)) { + fprintf(stderr, + "VIDEOIO ERROR: V4L2: getting property #%d is not supported\n", + property_id); + return -1; + } + + v4l2_control control = {v4l2id, 0}; + + if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_CTRL, + &control)) { + + fprintf( stderr, "VIDEOIO ERROR: V4L2: "); + switch (property_id) { + case CV_CAP_PROP_BRIGHTNESS: + fprintf (stderr, "Brightness"); + break; + case CV_CAP_PROP_CONTRAST: + fprintf (stderr, "Contrast"); + break; + case CV_CAP_PROP_SATURATION: + fprintf (stderr, "Saturation"); + break; + case CV_CAP_PROP_HUE: + fprintf (stderr, "Hue"); + break; + case CV_CAP_PROP_GAIN: + fprintf (stderr, "Gain"); + break; + case CV_CAP_PROP_AUTO_EXPOSURE: + fprintf (stderr, "Auto Exposure"); + break; + case CV_CAP_PROP_EXPOSURE: + fprintf (stderr, "Exposure"); + break; + case CV_CAP_PROP_AUTOFOCUS: + fprintf (stderr, "Autofocus"); + break; + case CV_CAP_PROP_FOCUS: + fprintf (stderr, "Focus"); + break; + } + fprintf (stderr, " is not supported by your device\n"); + + return -1; + } + + /* get the min/max values */ + Range range = capture->getRange(property_id); + + /* 
all was OK, so convert to 0.0 - 1.0 range, and return the value */ + return ((double)control.value - range.start) / range.size(); + + } }; static bool icvSetControl (CvCaptureCAM_V4L* capture, - int property_id, double value) { + int property_id, double value) { - /* limitation of the input value */ - if (value < 0.0) { - value = 0.0; - } else if (value > 1.0) { - value = 1.0; - } + /* limitation of the input value */ + if (value < 0.0) { + value = 0.0; + } else if (value > 1.0) { + value = 1.0; + } /* initialisations */ __u32 v4l2id = capPropertyToV4L2(property_id); @@ -1773,7 +1773,7 @@ static bool icvSetControl (CvCaptureCAM_V4L* capture, } static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, - int property_id, double value ){ + int property_id, double value ){ static int width = 0, height = 0; bool retval = false; bool possible; @@ -1813,19 +1813,19 @@ static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, retval = possible || !bool(value); break; case CV_CAP_PROP_FOURCC: - { - __u32 old_palette = capture->palette; - __u32 new_palette = static_cast<__u32>(value); - capture->palette = new_palette; - if (v4l2_reset(capture)) { - retval = true; - } else { - capture->palette = old_palette; - v4l2_reset(capture); - retval = false; - } + { + __u32 old_palette = capture->palette; + __u32 new_palette = static_cast<__u32>(value); + capture->palette = new_palette; + if (v4l2_reset(capture)) { + retval = true; + } else { + capture->palette = old_palette; + v4l2_reset(capture); + retval = false; } - break; + } + break; case CV_CAP_PROP_BUFFERSIZE: if ((int)value > MAX_V4L_BUFFERS || (int)value < 1) { fprintf(stderr, "V4L: Bad buffer size %d, buffer size must be from 1 to %d\n", (int)value, MAX_V4L_BUFFERS); @@ -1848,43 +1848,43 @@ static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, } static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){ - /* Deallocate space - Hopefully, no leaks */ - - if (!capture->deviceName.empty()) - { - if 
(capture->deviceHandle != -1) - { - capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (-1 == ioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type)) { - perror ("Unable to stop the stream"); - } - - for (unsigned int n_buffers_ = 0; n_buffers_ < MAX_V4L_BUFFERS; ++n_buffers_) - { - if (capture->buffers[n_buffers_].start) { - if (-1 == munmap (capture->buffers[n_buffers_].start, capture->buffers[n_buffers_].length)) { - perror ("munmap"); - } else { - capture->buffers[n_buffers_].start = 0; - } - } - } - - if (capture->buffers[MAX_V4L_BUFFERS].start) - { - free(capture->buffers[MAX_V4L_BUFFERS].start); - capture->buffers[MAX_V4L_BUFFERS].start = 0; - } - } - - if (capture->deviceHandle != -1) - close(capture->deviceHandle); - - if (capture->frame_allocated && capture->frame.imageData) - cvFree(&capture->frame.imageData); - - capture->deviceName.clear(); // flag that the capture is closed - } + /* Deallocate space - Hopefully, no leaks */ + + if (!capture->deviceName.empty()) + { + if (capture->deviceHandle != -1) + { + capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == ioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type)) { + perror ("Unable to stop the stream"); + } + + for (unsigned int n_buffers_ = 0; n_buffers_ < MAX_V4L_BUFFERS; ++n_buffers_) + { + if (capture->buffers[n_buffers_].start) { + if (-1 == munmap (capture->buffers[n_buffers_].start, capture->buffers[n_buffers_].length)) { + perror ("munmap"); + } else { + capture->buffers[n_buffers_].start = 0; + } + } + } + + if (capture->buffers[MAX_V4L_BUFFERS].start) + { + free(capture->buffers[MAX_V4L_BUFFERS].start); + capture->buffers[MAX_V4L_BUFFERS].start = 0; + } + } + + if (capture->deviceHandle != -1) + close(capture->deviceHandle); + + if (capture->frame_allocated && capture->frame.imageData) + cvFree(&capture->frame.imageData); + + capture->deviceName.clear(); // flag that the capture is closed + } }; bool CvCaptureCAM_V4L::grabFrame() From 
83d915eca1f4f3e1cb28b2d7444890d911e7f9a9 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 14:03:02 +0300 Subject: [PATCH 15/40] videoio(v4l): don't pre-scan or cache list of available cameras --- modules/videoio/src/cap_v4l.cpp | 88 ++++++++++++--------------------- 1 file changed, 32 insertions(+), 56 deletions(-) diff --git a/modules/videoio/src/cap_v4l.cpp b/modules/videoio/src/cap_v4l.cpp index e292b7290a..f236131ad7 100644 --- a/modules/videoio/src/cap_v4l.cpp +++ b/modules/videoio/src/cap_v4l.cpp @@ -347,43 +347,10 @@ static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id, /*********************** Implementations ***************************************/ -static int numCameras = 0; -static int indexList = 0; - CvCaptureCAM_V4L::~CvCaptureCAM_V4L() { icvCloseCAM_V4L(this); } -/* Simple test program: Find number of Video Sources available. - Start from 0 and go to MAX_CAMERAS while checking for the device with that name. - If it fails on the first attempt of /dev/video0, then check if /dev/video is valid. - Returns the global numCameras with the correct value (we hope) */ - -static void icvInitCapture_V4L() { - int deviceHandle; - int CameraNumber; - char deviceName[MAX_DEVICE_DRIVER_NAME]; - - CameraNumber = 0; - while(CameraNumber < MAX_CAMERAS) { - /* Print the CameraNumber at the end of the string with a width of one character */ - sprintf(deviceName, "/dev/video%1d", CameraNumber); - /* Test using an open to see if this new device name really does exists. 
*/ - deviceHandle = open(deviceName, O_RDONLY); - if (deviceHandle != -1) { - /* This device does indeed exist - add it to the total so far */ - // add indexList - indexList|=(1 << CameraNumber); - numCameras++; - } - if (deviceHandle != -1) - close(deviceHandle); - /* Set up to test the next /dev/video source in line */ - CameraNumber++; - } /* End while */ - -}; /* End icvInitCapture_V4L */ - static bool try_palette_v4l2(CvCaptureCAM_V4L* capture) { capture->form = v4l2_format(); @@ -785,39 +752,48 @@ static bool v4l2_reset( CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::open(int _index) { - int autoindex = 0; - char _deviceName[MAX_DEVICE_DRIVER_NAME]; - - if (!numCameras) - icvInitCapture_V4L(); /* Haven't called icvInitCapture yet - do it now! */ - if (!numCameras) - return false; /* Are there any /dev/video input sources? */ - - //search index in indexList - if ( (_index>-1) && ! ((1 << _index) & indexList) ) - { - fprintf( stderr, "VIDEOIO ERROR: V4L: index %d is not correct!\n",_index); - return false; /* Did someone ask for not correct video source number? 
*/ - } - + cv::String name; /* Select camera, or rather, V4L video source */ - if (_index<0) { // Asking for the first device available - for (; autoindex Date: Thu, 31 May 2018 14:11:47 +0300 Subject: [PATCH 16/40] videoio(v4l): eliminate global variable --- modules/videoio/src/cap_v4l.cpp | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/modules/videoio/src/cap_v4l.cpp b/modules/videoio/src/cap_v4l.cpp index f236131ad7..adf5524e39 100644 --- a/modules/videoio/src/cap_v4l.cpp +++ b/modules/videoio/src/cap_v4l.cpp @@ -265,8 +265,6 @@ struct buffer size_t length; }; -static unsigned int n_buffers = 0; - struct CvCaptureCAM_V4L CV_FINAL : public CvCapture { int deviceHandle; @@ -693,7 +691,7 @@ try_again: } } - for (n_buffers = 0; n_buffers < capture->req.count; ++n_buffers) + for (unsigned int n_buffers = 0; n_buffers < capture->req.count; ++n_buffers) { v4l2_buffer buf = v4l2_buffer(); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -1835,13 +1833,13 @@ static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){ perror ("Unable to stop the stream"); } - for (unsigned int n_buffers_ = 0; n_buffers_ < MAX_V4L_BUFFERS; ++n_buffers_) + for (unsigned int n_buffers = 0; n_buffers < MAX_V4L_BUFFERS; ++n_buffers) { - if (capture->buffers[n_buffers_].start) { - if (-1 == munmap (capture->buffers[n_buffers_].start, capture->buffers[n_buffers_].length)) { + if (capture->buffers[n_buffers].start) { + if (-1 == munmap (capture->buffers[n_buffers].start, capture->buffers[n_buffers].length)) { perror ("munmap"); } else { - capture->buffers[n_buffers_].start = 0; + capture->buffers[n_buffers].start = 0; } } } From 32bab45f814132abb362165789c6aa697f44ab31 Mon Sep 17 00:00:00 2001 From: Dmitry Kurtaev Date: Thu, 31 May 2018 16:16:34 +0300 Subject: [PATCH 17/40] Fix Inference Engine graphs with fused output layers --- modules/dnn/src/dnn.cpp | 2 +- modules/dnn/test/test_layers.cpp | 53 ++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 1 
deletion(-) diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index 973c98abc3..a5656821c6 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -1132,7 +1132,7 @@ struct Net::Impl if (layerNet != ieInpNode->net) { // layerNet is empty or nodes are from different graphs. - ieInpNode->net->addOutput(inpLd.name); + ieInpNode->net->addOutput(ieInpNode->layer->name); } } } diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp index 89c6ed8915..5cbfba5517 100644 --- a/modules/dnn/test/test_layers.cpp +++ b/modules/dnn/test/test_layers.cpp @@ -834,6 +834,59 @@ TEST(Test_DLDT, two_inputs) normAssert(out, firstInp + secondInp); } + +class UnsupportedLayer : public Layer +{ +public: + UnsupportedLayer(const LayerParams ¶ms) {} + + static Ptr create(const LayerParams& params) + { + return Ptr(new UnsupportedLayer(params)); + } + + virtual bool supportBackend(int backendId) CV_OVERRIDE + { + return backendId == DNN_BACKEND_DEFAULT; + } + + virtual void forward(std::vector &inputs, std::vector &outputs, std::vector &internals) CV_OVERRIDE {} + + virtual void forward(cv::InputArrayOfArrays inputs, cv::OutputArrayOfArrays outputs, cv::OutputArrayOfArrays internals) CV_OVERRIDE {} +}; + +TEST(Test_DLDT, fused_output) +{ + static const int kNumChannels = 3; + CV_DNN_REGISTER_LAYER_CLASS(Unsupported, UnsupportedLayer); + Net net; + { + LayerParams lp; + lp.set("kernel_size", 1); + lp.set("num_output", 3); + lp.set("bias_term", false); + lp.type = "Convolution"; + lp.name = "testConv"; + lp.blobs.push_back(Mat({kNumChannels, 1, 1, 1}, CV_32F, Scalar(1))); + net.addLayerToPrev(lp.name, lp.type, lp); + } + { + LayerParams lp; + lp.set("bias_term", false); + lp.type = "Scale"; + lp.name = "testScale"; + lp.blobs.push_back(Mat({kNumChannels}, CV_32F, Scalar(1))); + net.addLayerToPrev(lp.name, lp.type, lp); + } + { + LayerParams lp; + net.addLayerToPrev("unsupported_layer", "Unsupported", lp); + } + 
net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE); + net.setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1))); + ASSERT_NO_THROW(net.forward()); + LayerFactory::unregisterLayer("Unsupported"); +} #endif // HAVE_INF_ENGINE // Test a custom layer. From 9ba9358ecbbd9f37b09663ccfa8899a839f95ec6 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 16:45:18 +0300 Subject: [PATCH 18/40] documentation: avoid links to 'master' branch from 3.4 maintenance branch --- .../py_svm_opencv/py_svm_opencv.markdown | 2 +- .../camera_calibration.markdown | 6 +++--- .../file_input_output_with_xml_yml.markdown | 2 +- .../how_to_scan_images.markdown | 2 +- .../how_to_use_ippa_conversion.markdown | 2 +- .../interoperability_with_OpenCV_1.markdown | 4 ++-- .../mat_the_basic_image_container.markdown | 2 +- .../dnn/dnn_custom_layers/dnn_custom_layers.md | 2 +- .../dnn/dnn_googlenet/dnn_googlenet.markdown | 2 +- .../feature_description.markdown | 6 +++--- .../feature_detection.markdown | 6 +++--- .../feature_flann_matcher.markdown | 6 +++--- .../feature_homography.markdown | 6 +++--- .../features2d/homography/homography.markdown | 4 ++-- .../corner_subpixels/corner_subpixels.markdown | 6 +++--- .../generic_corner_detector.markdown | 6 +++--- .../good_features_to_track.markdown | 6 +++--- .../harris_detector/harris_detector.markdown | 6 +++--- .../gpu_basics_similarity.markdown | 2 +- .../highgui/trackbar/trackbar.markdown | 6 +++--- .../erosion_dilatation.markdown | 6 +++--- .../back_projection/back_projection.markdown | 18 +++++++++--------- .../histogram_calculation.markdown | 6 +++--- .../histogram_comparison.markdown | 6 +++--- .../histogram_equalization.markdown | 6 +++--- .../template_matching.markdown | 6 +++--- .../canny_detector/canny_detector.markdown | 6 +++--- .../distance_transform.markdown | 2 +- .../imgproc/imgtrans/remap/remap.markdown | 6 +++--- .../opening_closing_hats.markdown | 6 +++--- .../bounding_rects_circles.markdown | 2 +- 
.../bounding_rotated_ellipses.markdown | 2 +- .../find_contours/find_contours.markdown | 2 +- .../shapedescriptors/hull/hull.markdown | 2 +- .../shapedescriptors/moments/moments.markdown | 2 +- .../point_polygon_test.markdown | 2 +- .../imgproc/threshold/threshold.markdown | 6 +++--- .../threshold_inRange.markdown | 6 +++--- .../android_ocl_intro.markdown | 4 ++-- .../display_image/display_image.markdown | 2 +- .../windows_visual_studio_opencv.markdown | 4 ++-- .../introduction_to_pca.markdown | 4 ++-- .../non_linear_svms/non_linear_svms.markdown | 2 +- .../cascade_classifier.markdown | 2 +- doc/tutorials/objdetect/traincascade.markdown | 2 +- .../stitching/stitcher/stitcher.markdown | 4 ++-- .../background_subtraction.markdown | 2 +- doc/tutorials/videoio/intelperc.markdown | 2 +- doc/tutorials/videoio/kinect_openni.markdown | 2 +- .../video_input_psnr_ssim.markdown | 4 ++-- .../videoio/video-write/video_write.markdown | 2 +- .../creating_widgets/creating_widgets.markdown | 2 +- doc/tutorials/viz/histo3D/histo3D.markdown | 2 +- .../viz/launching_viz/launching_viz.markdown | 2 +- .../transformations/transformations.markdown | 2 +- .../viz/widget_pose/widget_pose.markdown | 2 +- modules/core/doc/cuda.markdown | 2 +- modules/highgui/include/opencv2/highgui.hpp | 2 +- .../objdetect/include/opencv2/objdetect.hpp | 2 +- samples/cpp/facial_features.cpp | 2 +- samples/dnn/README.md | 6 +++--- 61 files changed, 118 insertions(+), 118 deletions(-) diff --git a/doc/py_tutorials/py_ml/py_svm/py_svm_opencv/py_svm_opencv.markdown b/doc/py_tutorials/py_ml/py_svm/py_svm_opencv/py_svm_opencv.markdown index dd034e9afa..a9502996f6 100644 --- a/doc/py_tutorials/py_ml/py_svm/py_svm_opencv/py_svm_opencv.markdown +++ b/doc/py_tutorials/py_ml/py_svm/py_svm_opencv/py_svm_opencv.markdown @@ -36,7 +36,7 @@ gives us a feature vector containing 64 values. This is the feature vector we us Finally, as in the previous case, we start by splitting our big dataset into individual cells. 
For every digit, 250 cells are reserved for training data and remaining 250 data is reserved for -testing. Full code is given below, you also can download it from [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/ml/py_svm_opencv/hogsvm.py): +testing. Full code is given below, you also can download it from [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/ml/py_svm_opencv/hogsvm.py): @include samples/python/tutorial_code/ml/py_svm_opencv/hogsvm.py diff --git a/doc/tutorials/calib3d/camera_calibration/camera_calibration.markdown b/doc/tutorials/calib3d/camera_calibration/camera_calibration.markdown index a7a7dd727e..6ce791cd0a 100644 --- a/doc/tutorials/calib3d/camera_calibration/camera_calibration.markdown +++ b/doc/tutorials/calib3d/camera_calibration/camera_calibration.markdown @@ -77,13 +77,13 @@ Source code You may also find the source code in the `samples/cpp/tutorial_code/calib3d/camera_calibration/` folder of the OpenCV source library or [download it from here -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/calib3d/camera_calibration/camera_calibration.cpp). The program has a +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/calib3d/camera_calibration/camera_calibration.cpp). The program has a single argument: the name of its configuration file. If none is given then it will try to open the one named "default.xml". [Here's a sample configuration file -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/calib3d/camera_calibration/in_VID5.xml) in XML format. In the +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/calib3d/camera_calibration/in_VID5.xml) in XML format. In the configuration file you may choose to use camera as an input, a video file or an image list. If you opt for the last one, you will need to create a configuration file where you enumerate the images to -use. 
Here's [an example of this ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/calib3d/camera_calibration/VID5.xml). +use. Here's [an example of this ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/calib3d/camera_calibration/VID5.xml). The important part to remember is that the images need to be specified using the absolute path or the relative one from your application's working directory. You may find all this in the samples directory mentioned above. diff --git a/doc/tutorials/core/file_input_output_with_xml_yml/file_input_output_with_xml_yml.markdown b/doc/tutorials/core/file_input_output_with_xml_yml/file_input_output_with_xml_yml.markdown index d447481c88..5e46343373 100644 --- a/doc/tutorials/core/file_input_output_with_xml_yml/file_input_output_with_xml_yml.markdown +++ b/doc/tutorials/core/file_input_output_with_xml_yml/file_input_output_with_xml_yml.markdown @@ -16,7 +16,7 @@ Source code ----------- You can [download this from here -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/file_input_output/file_input_output.cpp) or find it in the +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/file_input_output/file_input_output.cpp) or find it in the `samples/cpp/tutorial_code/core/file_input_output/file_input_output.cpp` of the OpenCV source code library. diff --git a/doc/tutorials/core/how_to_scan_images/how_to_scan_images.markdown b/doc/tutorials/core/how_to_scan_images/how_to_scan_images.markdown index 4d5881943e..658147eccf 100644 --- a/doc/tutorials/core/how_to_scan_images/how_to_scan_images.markdown +++ b/doc/tutorials/core/how_to_scan_images/how_to_scan_images.markdown @@ -51,7 +51,7 @@ three major ways of going through an image pixel by pixel. To make things a litt will make the scanning for each image using all of these methods, and print out how long it took. 
You can download the full source code [here -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/how_to_scan_images/how_to_scan_images.cpp) or look it up in +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/how_to_scan_images/how_to_scan_images.cpp) or look it up in the samples directory of OpenCV at the cpp tutorial code for the core section. Its basic usage is: @code{.bash} how_to_scan_images imageName.jpg intValueToReduce [G] diff --git a/doc/tutorials/core/how_to_use_ippa_conversion/how_to_use_ippa_conversion.markdown b/doc/tutorials/core/how_to_use_ippa_conversion/how_to_use_ippa_conversion.markdown index 50f3b545ef..647b280cc7 100644 --- a/doc/tutorials/core/how_to_use_ippa_conversion/how_to_use_ippa_conversion.markdown +++ b/doc/tutorials/core/how_to_use_ippa_conversion/how_to_use_ippa_conversion.markdown @@ -16,7 +16,7 @@ Code You may also find the source code in the `samples/cpp/tutorial_code/core/ippasync/ippasync_sample.cpp` file of the OpenCV source library or -download it from [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/ippasync/ippasync_sample.cpp). +download it from [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/ippasync/ippasync_sample.cpp). 
@include cpp/tutorial_code/core/ippasync/ippasync_sample.cpp diff --git a/doc/tutorials/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.markdown b/doc/tutorials/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.markdown index 298c45b3b1..f98c93fbc5 100644 --- a/doc/tutorials/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.markdown +++ b/doc/tutorials/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.markdown @@ -85,7 +85,7 @@ L = Mat(pI); A case study ------------ -Now that you have the basics done [here's](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp) +Now that you have the basics done [here's](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp) an example that mixes the usage of the C interface with the C++ one. You will also find it in the sample directory of the OpenCV source code library at the `samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp` . @@ -132,7 +132,7 @@ output: You may observe a runtime instance of this on the [YouTube here](https://www.youtube.com/watch?v=qckm-zvo31w) and you can [download the source code from here -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp) +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp) or find it in the `samples/cpp/tutorial_code/core/interoperability_with_OpenCV_1/interoperability_with_OpenCV_1.cpp` of the OpenCV source code library. 
diff --git a/doc/tutorials/core/mat_the_basic_image_container/mat_the_basic_image_container.markdown b/doc/tutorials/core/mat_the_basic_image_container/mat_the_basic_image_container.markdown index 9e4bd8a223..a7c2337008 100644 --- a/doc/tutorials/core/mat_the_basic_image_container/mat_the_basic_image_container.markdown +++ b/doc/tutorials/core/mat_the_basic_image_container/mat_the_basic_image_container.markdown @@ -260,7 +260,7 @@ OpenCV offers support for output of other common OpenCV data structures too via ![](images/MatBasicContainerOut15.png) Most of the samples here have been included in a small console application. You can download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/core/mat_the_basic_image_container/mat_the_basic_image_container.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/core/mat_the_basic_image_container/mat_the_basic_image_container.cpp) or in the core section of the cpp samples. You can also find a quick video demonstration of this on diff --git a/doc/tutorials/dnn/dnn_custom_layers/dnn_custom_layers.md b/doc/tutorials/dnn/dnn_custom_layers/dnn_custom_layers.md index f367946620..0486b31e14 100644 --- a/doc/tutorials/dnn/dnn_custom_layers/dnn_custom_layers.md +++ b/doc/tutorials/dnn/dnn_custom_layers/dnn_custom_layers.md @@ -216,7 +216,7 @@ a centric one. @snippet dnn/edge_detection.py Register That's it! We've replaced an implemented OpenCV's layer to a custom one. -You may find a full script in the [source code](https://github.com/opencv/opencv/tree/master/samples/dnn/edge_detection.py). +You may find a full script in the [source code](https://github.com/opencv/opencv/tree/3.4/samples/dnn/edge_detection.py). 
diff --git a/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown b/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown index 50946b1ba4..e65f70044b 100644 --- a/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown +++ b/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown @@ -25,7 +25,7 @@ Explanation [bvlc_googlenet.caffemodel](http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel) Also you need file with names of [ILSVRC2012](http://image-net.org/challenges/LSVRC/2012/browse-synsets) classes: - [classification_classes_ILSVRC2012.txt](https://github.com/opencv/opencv/tree/master/samples/dnn/classification_classes_ILSVRC2012.txt). + [classification_classes_ILSVRC2012.txt](https://github.com/opencv/opencv/tree/3.4/samples/dnn/classification_classes_ILSVRC2012.txt). Put these files into working dir of this program example. diff --git a/doc/tutorials/features2d/feature_description/feature_description.markdown b/doc/tutorials/features2d/feature_description/feature_description.markdown index ec3cd0e4c5..01ec0de90e 100644 --- a/doc/tutorials/features2d/feature_description/feature_description.markdown +++ b/doc/tutorials/features2d/feature_description/feature_description.markdown @@ -24,19 +24,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp) @include samples/cpp/tutorial_code/features2D/feature_description/SURF_matching_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java) @include samples/java/tutorial_code/features2D/feature_description/SURFMatchingDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py) @include samples/python/tutorial_code/features2D/feature_description/SURF_matching_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/feature_detection/feature_detection.markdown b/doc/tutorials/features2d/feature_detection/feature_detection.markdown index d0996512ef..bb2658633b 100644 --- a/doc/tutorials/features2d/feature_detection/feature_detection.markdown +++ b/doc/tutorials/features2d/feature_detection/feature_detection.markdown @@ -22,19 +22,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp) @include samples/cpp/tutorial_code/features2D/feature_detection/SURF_detection_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java) @include samples/java/tutorial_code/features2D/feature_detection/SURFDetectionDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py) @include samples/python/tutorial_code/features2D/feature_detection/SURF_detection_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown b/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown index e7f865c3ce..de04f63504 100644 --- a/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown +++ b/doc/tutorials/features2d/feature_flann_matcher/feature_flann_matcher.markdown @@ -45,19 +45,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp) @include samples/cpp/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java) @include samples/java/tutorial_code/features2D/feature_flann_matcher/SURFFLANNMatchingDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py) @include samples/python/tutorial_code/features2D/feature_flann_matcher/SURF_FLANN_matching_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/feature_homography/feature_homography.markdown b/doc/tutorials/features2d/feature_homography/feature_homography.markdown index c4f0c00e55..908f2c69a1 100644 --- a/doc/tutorials/features2d/feature_homography/feature_homography.markdown +++ b/doc/tutorials/features2d/feature_homography/feature_homography.markdown @@ -20,19 +20,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp) @include samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java) @include samples/java/tutorial_code/features2D/feature_homography/SURFFLANNMatchingHomographyDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py) @include samples/python/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/homography/homography.markdown b/doc/tutorials/features2d/homography/homography.markdown index 020017f023..1fc8a9e3c4 100644 --- a/doc/tutorials/features2d/homography/homography.markdown +++ b/doc/tutorials/features2d/homography/homography.markdown @@ -12,8 +12,8 @@ For detailed explanations about the theory, please refer to a computer vision co * An Invitation to 3-D Vision: From Images to Geometric Models, @cite Ma:2003:IVI * Computer Vision: Algorithms and Applications, @cite RS10 -The tutorial code can be found [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/features2D/Homography). -The images used in this tutorial can be found [here](https://github.com/opencv/opencv/tree/master/samples/data) (`left*.jpg`). +The tutorial code can be found [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/features2D/Homography). +The images used in this tutorial can be found [here](https://github.com/opencv/opencv/tree/3.4/samples/data) (`left*.jpg`). 
Basic theory {#tutorial_homography_Basic_theory} ------------ diff --git a/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown b/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown index 82b33dd256..514fd332c0 100644 --- a/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown +++ b/doc/tutorials/features2d/trackingmotion/corner_subpixels/corner_subpixels.markdown @@ -17,19 +17,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java) @include samples/java/tutorial_code/TrackingMotion/corner_subpixels/CornerSubPixDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py) @include samples/python/tutorial_code/TrackingMotion/corner_subpixels/cornerSubPix_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown b/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown index f10d3efe4e..aded0d1c98 100644 --- a/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown +++ b/doc/tutorials/features2d/trackingmotion/generic_corner_detector/generic_corner_detector.markdown @@ -21,21 +21,21 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java) @include samples/java/tutorial_code/TrackingMotion/generic_corner_detector/CornerDetectorDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py) @include samples/python/tutorial_code/TrackingMotion/generic_corner_detector/cornerDetector_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown b/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown index 70d25ab9e2..1c5c7029cf 100644 --- a/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown +++ b/doc/tutorials/features2d/trackingmotion/good_features_to_track/good_features_to_track.markdown @@ -16,19 +16,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java) @include samples/java/tutorial_code/TrackingMotion/good_features_to_track/GoodFeaturesToTrackDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py) @include samples/python/tutorial_code/TrackingMotion/good_features_to_track/goodFeaturesToTrack_Demo.py @end_toggle diff --git a/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown b/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown index bbf4fdbd5b..a59a2ad3af 100644 --- a/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown +++ b/doc/tutorials/features2d/trackingmotion/harris_detector/harris_detector.markdown @@ -120,19 +120,19 @@ Code @add_toggle_cpp This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp) @include samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java) @include samples/java/tutorial_code/TrackingMotion/harris_detector/CornerHarrisDemo.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py) @include samples/python/tutorial_code/TrackingMotion/harris_detector/cornerHarris_Demo.py @end_toggle diff --git a/doc/tutorials/gpu/gpu-basics-similarity/gpu_basics_similarity.markdown b/doc/tutorials/gpu/gpu-basics-similarity/gpu_basics_similarity.markdown index 9452bc38d7..1ef0e74311 100644 --- a/doc/tutorials/gpu/gpu-basics-similarity/gpu_basics_similarity.markdown +++ b/doc/tutorials/gpu/gpu-basics-similarity/gpu_basics_similarity.markdown @@ -24,7 +24,7 @@ The source code You may also find the source code and the video file in the `samples/cpp/tutorial_code/gpu/gpu-basics-similarity/gpu-basics-similarity` directory of the OpenCV -source library or download it from [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/gpu/gpu-basics-similarity/gpu-basics-similarity.cpp). +source library or download it from [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/gpu/gpu-basics-similarity/gpu-basics-similarity.cpp). The full source code is quite long (due to the controlling of the application via the command line arguments and performance measurement). Therefore, to avoid cluttering up these sections with those you'll find here only the functions itself. diff --git a/doc/tutorials/highgui/trackbar/trackbar.markdown b/doc/tutorials/highgui/trackbar/trackbar.markdown index d6700d6387..0613d7d5e8 100644 --- a/doc/tutorials/highgui/trackbar/trackbar.markdown +++ b/doc/tutorials/highgui/trackbar/trackbar.markdown @@ -27,19 +27,19 @@ Let's modify the program made in the tutorial @ref tutorial_adding_images. We wi @add_toggle_cpp This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/HighGUI/AddingImagesTrackbar.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/HighGUI/AddingImagesTrackbar.cpp) @include cpp/tutorial_code/HighGUI/AddingImagesTrackbar.cpp @end_toggle @add_toggle_java This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/highgui/trackbar/AddingImagesTrackbar.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/highgui/trackbar/AddingImagesTrackbar.java) @include java/tutorial_code/highgui/trackbar/AddingImagesTrackbar.java @end_toggle @add_toggle_python This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/highgui/trackbar/AddingImagesTrackbar.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/highgui/trackbar/AddingImagesTrackbar.py) @include python/tutorial_code/highgui/trackbar/AddingImagesTrackbar.py @end_toggle diff --git a/doc/tutorials/imgproc/erosion_dilatation/erosion_dilatation.markdown b/doc/tutorials/imgproc/erosion_dilatation/erosion_dilatation.markdown index 8afcd2dea8..feca4e9aa6 100644 --- a/doc/tutorials/imgproc/erosion_dilatation/erosion_dilatation.markdown +++ b/doc/tutorials/imgproc/erosion_dilatation/erosion_dilatation.markdown @@ -62,19 +62,19 @@ Code @add_toggle_cpp This tutorial's code is shown below. You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Morphology_1.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgProc/Morphology_1.cpp) @include samples/cpp/tutorial_code/ImgProc/Morphology_1.cpp @end_toggle @add_toggle_java This tutorial's code is shown below. 
You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/erosion_dilatation/MorphologyDemo1.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgProc/erosion_dilatation/MorphologyDemo1.java) @include samples/java/tutorial_code/ImgProc/erosion_dilatation/MorphologyDemo1.java @end_toggle @add_toggle_python This tutorial's code is shown below. You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/erosion_dilatation/morphology_1.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/imgProc/erosion_dilatation/morphology_1.py) @include samples/python/tutorial_code/imgProc/erosion_dilatation/morphology_1.py @end_toggle diff --git a/doc/tutorials/imgproc/histograms/back_projection/back_projection.markdown b/doc/tutorials/imgproc/histograms/back_projection/back_projection.markdown index 4b67f0baae..a2a894a459 100644 --- a/doc/tutorials/imgproc/histograms/back_projection/back_projection.markdown +++ b/doc/tutorials/imgproc/histograms/back_projection/back_projection.markdown @@ -71,13 +71,13 @@ Code @add_toggle_cpp - **Downloadable code**: - Click - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo1.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo1.cpp) for the basic version (explained in this tutorial). 
- For stuff slightly fancier (using H-S histograms and floodFill to define a mask for the skin area) you can check the [improved - demo](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo2.cpp) + demo](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo2.cpp) - ...or you can always check out the classical - [camshiftdemo](https://github.com/opencv/opencv/tree/master/samples/cpp/camshiftdemo.cpp) + [camshiftdemo](https://github.com/opencv/opencv/tree/3.4/samples/cpp/camshiftdemo.cpp) in samples. - **Code at glance:** @@ -87,13 +87,13 @@ Code @add_toggle_java - **Downloadable code**: - Click - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/Histograms_Matching/back_projection/CalcBackProjectDemo1.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/Histograms_Matching/back_projection/CalcBackProjectDemo1.java) for the basic version (explained in this tutorial). - For stuff slightly fancier (using H-S histograms and floodFill to define a mask for the skin area) you can check the [improved - demo](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/Histograms_Matching/back_projection/CalcBackProjectDemo2.java) + demo](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/Histograms_Matching/back_projection/CalcBackProjectDemo2.java) - ...or you can always check out the classical - [camshiftdemo](https://github.com/opencv/opencv/tree/master/samples/cpp/camshiftdemo.cpp) + [camshiftdemo](https://github.com/opencv/opencv/tree/3.4/samples/cpp/camshiftdemo.cpp) in samples. 
- **Code at glance:** @@ -103,13 +103,13 @@ Code @add_toggle_python - **Downloadable code**: - Click - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/Histograms_Matching/back_projection/calcBackProject_Demo1.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/Histograms_Matching/back_projection/calcBackProject_Demo1.py) for the basic version (explained in this tutorial). - For stuff slightly fancier (using H-S histograms and floodFill to define a mask for the skin area) you can check the [improved - demo](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/Histograms_Matching/back_projection/calcBackProject_Demo2.py) + demo](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/Histograms_Matching/back_projection/calcBackProject_Demo2.py) - ...or you can always check out the classical - [camshiftdemo](https://github.com/opencv/opencv/tree/master/samples/cpp/camshiftdemo.cpp) + [camshiftdemo](https://github.com/opencv/opencv/tree/3.4/samples/cpp/camshiftdemo.cpp) in samples. 
- **Code at glance:** diff --git a/doc/tutorials/imgproc/histograms/histogram_calculation/histogram_calculation.markdown b/doc/tutorials/imgproc/histograms/histogram_calculation/histogram_calculation.markdown index 30930cd7db..8410b9267b 100644 --- a/doc/tutorials/imgproc/histograms/histogram_calculation/histogram_calculation.markdown +++ b/doc/tutorials/imgproc/histograms/histogram_calculation/histogram_calculation.markdown @@ -69,7 +69,7 @@ Code @add_toggle_cpp - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/calcHist_Demo.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/calcHist_Demo.cpp) - **Code at glance:** @include samples/cpp/tutorial_code/Histograms_Matching/calcHist_Demo.cpp @@ -77,7 +77,7 @@ Code @add_toggle_java - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/Histograms_Matching/histogram_calculation/CalcHistDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/Histograms_Matching/histogram_calculation/CalcHistDemo.java) - **Code at glance:** @include samples/java/tutorial_code/Histograms_Matching/histogram_calculation/CalcHistDemo.java @@ -85,7 +85,7 @@ Code @add_toggle_python - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/Histograms_Matching/histogram_calculation/calcHist_Demo.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/Histograms_Matching/histogram_calculation/calcHist_Demo.py) - **Code at glance:** @include samples/python/tutorial_code/Histograms_Matching/histogram_calculation/calcHist_Demo.py diff --git a/doc/tutorials/imgproc/histograms/histogram_comparison/histogram_comparison.markdown b/doc/tutorials/imgproc/histograms/histogram_comparison/histogram_comparison.markdown index 23c478a03c..8577067372 100644 --- 
a/doc/tutorials/imgproc/histograms/histogram_comparison/histogram_comparison.markdown +++ b/doc/tutorials/imgproc/histograms/histogram_comparison/histogram_comparison.markdown @@ -46,7 +46,7 @@ Code @add_toggle_cpp - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp) - **Code at glance:** @include samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp @@ -54,7 +54,7 @@ Code @add_toggle_java - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/Histograms_Matching/histogram_comparison/CompareHistDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/Histograms_Matching/histogram_comparison/CompareHistDemo.java) - **Code at glance:** @include samples/java/tutorial_code/Histograms_Matching/histogram_comparison/CompareHistDemo.java @@ -62,7 +62,7 @@ Code @add_toggle_python - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/Histograms_Matching/histogram_comparison/compareHist_Demo.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/Histograms_Matching/histogram_comparison/compareHist_Demo.py) - **Code at glance:** @include samples/python/tutorial_code/Histograms_Matching/histogram_comparison/compareHist_Demo.py diff --git a/doc/tutorials/imgproc/histograms/histogram_equalization/histogram_equalization.markdown b/doc/tutorials/imgproc/histograms/histogram_equalization/histogram_equalization.markdown index 5a452a78f7..f5edd73ac7 100644 --- a/doc/tutorials/imgproc/histograms/histogram_equalization/histogram_equalization.markdown +++ b/doc/tutorials/imgproc/histograms/histogram_equalization/histogram_equalization.markdown @@ -64,7 +64,7 @@ Code @add_toggle_cpp - 
**Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/EqualizeHist_Demo.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/EqualizeHist_Demo.cpp) - **Code at glance:** @include samples/cpp/tutorial_code/Histograms_Matching/EqualizeHist_Demo.cpp @@ -72,7 +72,7 @@ Code @add_toggle_java - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHistDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHistDemo.java) - **Code at glance:** @include samples/java/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHistDemo.java @@ -80,7 +80,7 @@ Code @add_toggle_python - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py) - **Code at glance:** @include samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py diff --git a/doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown b/doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown index 1189923a98..c5f22330cf 100644 --- a/doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown +++ b/doc/tutorials/imgproc/histograms/template_matching/template_matching.markdown @@ -130,7 +130,7 @@ Code @add_toggle_cpp - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/Histograms_Matching/MatchTemplate_Demo.cpp) + 
[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/Histograms_Matching/MatchTemplate_Demo.cpp) - **Code at glance:** @include samples/cpp/tutorial_code/Histograms_Matching/MatchTemplate_Demo.cpp @@ -139,7 +139,7 @@ Code @add_toggle_java - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/tutorial_template_matching/MatchTemplateDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgProc/tutorial_template_matching/MatchTemplateDemo.java) - **Code at glance:** @include samples/java/tutorial_code/ImgProc/tutorial_template_matching/MatchTemplateDemo.java @@ -148,7 +148,7 @@ Code @add_toggle_python - **Downloadable code**: Click - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/match_template/match_template.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/imgProc/match_template/match_template.py) - **Code at glance:** @include samples/python/tutorial_code/imgProc/match_template/match_template.py diff --git a/doc/tutorials/imgproc/imgtrans/canny_detector/canny_detector.markdown b/doc/tutorials/imgproc/imgtrans/canny_detector/canny_detector.markdown index cf3cc270e1..cebf43ce7b 100644 --- a/doc/tutorials/imgproc/imgtrans/canny_detector/canny_detector.markdown +++ b/doc/tutorials/imgproc/imgtrans/canny_detector/canny_detector.markdown @@ -68,19 +68,19 @@ Code @add_toggle_cpp - The tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp) @include samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp @end_toggle @add_toggle_java - The tutorial code's is shown lines below. 
You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java) @include samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java @end_toggle @add_toggle_python - The tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/ImgTrans/canny_detector/CannyDetector_Demo.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/ImgTrans/canny_detector/CannyDetector_Demo.py) @include samples/python/tutorial_code/ImgTrans/canny_detector/CannyDetector_Demo.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/distance_transformation/distance_transform.markdown b/doc/tutorials/imgproc/imgtrans/distance_transformation/distance_transform.markdown index a46f578feb..12ef87fc7d 100644 --- a/doc/tutorials/imgproc/imgtrans/distance_transformation/distance_transform.markdown +++ b/doc/tutorials/imgproc/imgtrans/distance_transformation/distance_transform.markdown @@ -17,7 +17,7 @@ Code ---- This tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/imageSegmentation.cpp). + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgTrans/imageSegmentation.cpp). @include samples/cpp/tutorial_code/ImgTrans/imageSegmentation.cpp Explanation / Result diff --git a/doc/tutorials/imgproc/imgtrans/remap/remap.markdown b/doc/tutorials/imgproc/imgtrans/remap/remap.markdown index 9c069c0e1a..53cba14516 100644 --- a/doc/tutorials/imgproc/imgtrans/remap/remap.markdown +++ b/doc/tutorials/imgproc/imgtrans/remap/remap.markdown @@ -53,19 +53,19 @@ Code @add_toggle_cpp - The tutorial code's is shown lines below. 
You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp) + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp) @include samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp @end_toggle @add_toggle_java - The tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java) + [here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java) @include samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java @end_toggle @add_toggle_python - The tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py) + [here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py) @include samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py @end_toggle diff --git a/doc/tutorials/imgproc/opening_closing_hats/opening_closing_hats.markdown b/doc/tutorials/imgproc/opening_closing_hats/opening_closing_hats.markdown index f5042907ef..328b8aa3a9 100644 --- a/doc/tutorials/imgproc/opening_closing_hats/opening_closing_hats.markdown +++ b/doc/tutorials/imgproc/opening_closing_hats/opening_closing_hats.markdown @@ -81,19 +81,19 @@ Code @add_toggle_cpp This tutorial's code is shown below. You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Morphology_2.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgProc/Morphology_2.cpp) @include cpp/tutorial_code/ImgProc/Morphology_2.cpp @end_toggle @add_toggle_java This tutorial's code is shown below. 
You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/opening_closing_hats/MorphologyDemo2.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgProc/opening_closing_hats/MorphologyDemo2.java) @include java/tutorial_code/ImgProc/opening_closing_hats/MorphologyDemo2.java @end_toggle @add_toggle_python This tutorial's code is shown below. You can also download it -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/opening_closing_hats/morphology_2.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/imgProc/opening_closing_hats/morphology_2.py) @include python/tutorial_code/imgProc/opening_closing_hats/morphology_2.py @end_toggle diff --git a/doc/tutorials/imgproc/shapedescriptors/bounding_rects_circles/bounding_rects_circles.markdown b/doc/tutorials/imgproc/shapedescriptors/bounding_rects_circles/bounding_rects_circles.markdown index 56d886d89f..978b900ab2 100644 --- a/doc/tutorials/imgproc/shapedescriptors/bounding_rects_circles/bounding_rects_circles.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/bounding_rects_circles/bounding_rects_circles.markdown @@ -16,7 +16,7 @@ Code ---- This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo1.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo1.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo1.cpp Explanation diff --git a/doc/tutorials/imgproc/shapedescriptors/bounding_rotated_ellipses/bounding_rotated_ellipses.markdown b/doc/tutorials/imgproc/shapedescriptors/bounding_rotated_ellipses/bounding_rotated_ellipses.markdown index e7b3a94553..eb21cf5bc7 100644 --- a/doc/tutorials/imgproc/shapedescriptors/bounding_rotated_ellipses/bounding_rotated_ellipses.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/bounding_rotated_ellipses/bounding_rotated_ellipses.markdown @@ -16,7 +16,7 @@ Code ---- This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo2.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo2.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo2.cpp Explanation diff --git a/doc/tutorials/imgproc/shapedescriptors/find_contours/find_contours.markdown b/doc/tutorials/imgproc/shapedescriptors/find_contours/find_contours.markdown index 11d1d9f476..af467bdc72 100644 --- a/doc/tutorials/imgproc/shapedescriptors/find_contours/find_contours.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/find_contours/find_contours.markdown @@ -16,7 +16,7 @@ Code ---- This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/findContours_demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/findContours_demo.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/findContours_demo.cpp Explanation diff --git a/doc/tutorials/imgproc/shapedescriptors/hull/hull.markdown b/doc/tutorials/imgproc/shapedescriptors/hull/hull.markdown index 38df72029e..cfb9241b81 100644 --- a/doc/tutorials/imgproc/shapedescriptors/hull/hull.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/hull/hull.markdown @@ -15,7 +15,7 @@ Code ---- This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/hull_demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/hull_demo.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/hull_demo.cpp diff --git a/doc/tutorials/imgproc/shapedescriptors/moments/moments.markdown b/doc/tutorials/imgproc/shapedescriptors/moments/moments.markdown index 3ef4c13615..231ff37500 100644 --- a/doc/tutorials/imgproc/shapedescriptors/moments/moments.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/moments/moments.markdown @@ -17,7 +17,7 @@ Code ---- This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/moments_demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/moments_demo.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/moments_demo.cpp Explanation diff --git a/doc/tutorials/imgproc/shapedescriptors/point_polygon_test/point_polygon_test.markdown b/doc/tutorials/imgproc/shapedescriptors/point_polygon_test/point_polygon_test.markdown index 4ffb98be48..1f50410616 100644 --- a/doc/tutorials/imgproc/shapedescriptors/point_polygon_test/point_polygon_test.markdown +++ b/doc/tutorials/imgproc/shapedescriptors/point_polygon_test/point_polygon_test.markdown @@ -15,7 +15,7 @@ Code ---- This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ShapeDescriptors/pointPolygonTest_demo.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ShapeDescriptors/pointPolygonTest_demo.cpp) @include samples/cpp/tutorial_code/ShapeDescriptors/pointPolygonTest_demo.cpp Explanation diff --git a/doc/tutorials/imgproc/threshold/threshold.markdown b/doc/tutorials/imgproc/threshold/threshold.markdown index 5aaa02f731..b4ee7b4623 100644 --- a/doc/tutorials/imgproc/threshold/threshold.markdown +++ b/doc/tutorials/imgproc/threshold/threshold.markdown @@ -98,19 +98,19 @@ Code @add_toggle_cpp The tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Threshold.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgProc/Threshold.cpp) @include samples/cpp/tutorial_code/ImgProc/Threshold.cpp @end_toggle @add_toggle_java The tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/threshold/Threshold.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgProc/threshold/Threshold.java) @include samples/java/tutorial_code/ImgProc/threshold/Threshold.java @end_toggle @add_toggle_python The tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/threshold/threshold.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/imgProc/threshold/threshold.py) @include samples/python/tutorial_code/imgProc/threshold/threshold.py @end_toggle diff --git a/doc/tutorials/imgproc/threshold_inRange/threshold_inRange.markdown b/doc/tutorials/imgproc/threshold_inRange/threshold_inRange.markdown index eaecf20e2b..a0c70f0e0a 100644 --- a/doc/tutorials/imgproc/threshold_inRange/threshold_inRange.markdown +++ b/doc/tutorials/imgproc/threshold_inRange/threshold_inRange.markdown @@ -40,19 +40,19 @@ Code @add_toggle_cpp The tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp) @include samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp @end_toggle @add_toggle_java The tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java) +[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java) @include samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java @end_toggle @add_toggle_python The tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py) +[here](https://github.com/opencv/opencv/tree/3.4/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py) @include samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py @end_toggle diff --git a/doc/tutorials/introduction/android_binary_package/android_ocl_intro.markdown b/doc/tutorials/introduction/android_binary_package/android_ocl_intro.markdown index 3dab6e8c98..4bd1bc6445 100644 --- a/doc/tutorials/introduction/android_binary_package/android_ocl_intro.markdown +++ b/doc/tutorials/introduction/android_binary_package/android_ocl_intro.markdown @@ -17,7 +17,7 @@ If you need help with anything of the above, you may refer to our @ref tutorial_ This tutorial also assumes you have an Android operated device with OpenCL enabled. The related source code is located within OpenCV samples at -[opencv/samples/android/tutorial-4-opencl](https://github.com/opencv/opencv/tree/master/samples/android/tutorial-4-opencl/) directory. +[opencv/samples/android/tutorial-4-opencl](https://github.com/opencv/opencv/tree/3.4/samples/android/tutorial-4-opencl/) directory. Preface ------- @@ -244,7 +244,7 @@ As you can see, inheritors for `Camera` and `Camera2` APIs should implement the @endcode Let's leave the details of their implementation beyond of this tutorial, please refer the -[source code](https://github.com/opencv/opencv/tree/master/samples/android/tutorial-4-opencl/) to see them. +[source code](https://github.com/opencv/opencv/tree/3.4/samples/android/tutorial-4-opencl/) to see them. 
Preview Frames modification --------------------------- diff --git a/doc/tutorials/introduction/display_image/display_image.markdown b/doc/tutorials/introduction/display_image/display_image.markdown index ba550580b6..60b6c4c3b5 100644 --- a/doc/tutorials/introduction/display_image/display_image.markdown +++ b/doc/tutorials/introduction/display_image/display_image.markdown @@ -14,7 +14,7 @@ Source Code ----------- Download the source code from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/introduction/display_image/display_image.cpp). +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/introduction/display_image/display_image.cpp). @include cpp/tutorial_code/introduction/display_image/display_image.cpp diff --git a/doc/tutorials/introduction/windows_visual_studio_opencv/windows_visual_studio_opencv.markdown b/doc/tutorials/introduction/windows_visual_studio_opencv/windows_visual_studio_opencv.markdown index e8ef112b02..caf7e57f98 100644 --- a/doc/tutorials/introduction/windows_visual_studio_opencv/windows_visual_studio_opencv.markdown +++ b/doc/tutorials/introduction/windows_visual_studio_opencv/windows_visual_studio_opencv.markdown @@ -194,7 +194,7 @@ Test it! -------- Now to try this out download our little test [source code -](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/introduction/windows_visual_studio_opencv/introduction_windows_vs.cpp) +](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/introduction/windows_visual_studio_opencv/introduction_windows_vs.cpp) or get it from the sample code folder of the OpenCV sources. Add this to your project and build it. Here's its content: @@ -210,7 +210,7 @@ the *IDE* the console window will not close once finished. It will wait for a ke This is important to remember when you code inside the code open and save commands. Your resources will be saved ( and queried for at opening!!!) relatively to your working directory. 
This is unless you give a full, explicit path as a parameter for the I/O functions. In the code above we open [this -OpenCV logo](https://github.com/opencv/opencv/tree/master/samples/data/opencv-logo.png). Before starting up the application, +OpenCV logo](https://github.com/opencv/opencv/tree/3.4/samples/data/opencv-logo.png). Before starting up the application, make sure you place the image file in your current working directory. Modify the image file name inside the code to try it out on other images too. Run it and voil á: diff --git a/doc/tutorials/ml/introduction_to_pca/introduction_to_pca.markdown b/doc/tutorials/ml/introduction_to_pca/introduction_to_pca.markdown index d1fe50b8dd..20a21b6271 100644 --- a/doc/tutorials/ml/introduction_to_pca/introduction_to_pca.markdown +++ b/doc/tutorials/ml/introduction_to_pca/introduction_to_pca.markdown @@ -92,10 +92,10 @@ Source Code ----------- This tutorial code's is shown lines below. You can also download it from - [here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ml/introduction_to_pca/introduction_to_pca.cpp). + [here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ml/introduction_to_pca/introduction_to_pca.cpp). 
@include cpp/tutorial_code/ml/introduction_to_pca/introduction_to_pca.cpp -@note Another example using PCA for dimensionality reduction while maintaining an amount of variance can be found at [opencv_source_code/samples/cpp/pca.cpp](https://github.com/opencv/opencv/tree/master/samples/cpp/pca.cpp) +@note Another example using PCA for dimensionality reduction while maintaining an amount of variance can be found at [opencv_source_code/samples/cpp/pca.cpp](https://github.com/opencv/opencv/tree/3.4/samples/cpp/pca.cpp) Explanation ----------- diff --git a/doc/tutorials/ml/non_linear_svms/non_linear_svms.markdown b/doc/tutorials/ml/non_linear_svms/non_linear_svms.markdown index f98cd63639..6f78dd65e6 100644 --- a/doc/tutorials/ml/non_linear_svms/non_linear_svms.markdown +++ b/doc/tutorials/ml/non_linear_svms/non_linear_svms.markdown @@ -87,7 +87,7 @@ Source Code ----------- You may also find the source code in `samples/cpp/tutorial_code/ml/non_linear_svms` folder of the OpenCV source library or -[download it from here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ml/non_linear_svms/non_linear_svms.cpp). +[download it from here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/ml/non_linear_svms/non_linear_svms.cpp). @note The following code has been implemented with OpenCV 3.0 classes and functions. An equivalent version of the code using OpenCV 2.4 can be found in [this page.](http://docs.opencv.org/2.4/doc/tutorials/ml/non_linear_svms/non_linear_svms.html#nonlinearsvms) diff --git a/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.markdown b/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.markdown index 53ed36ee68..093588f88a 100644 --- a/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.markdown +++ b/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.markdown @@ -18,7 +18,7 @@ Code ---- This tutorial code's is shown lines below. 
You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/objectDetection/objectDetection.cpp) +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/objectDetection/objectDetection.cpp) @include samples/cpp/tutorial_code/objectDetection/objectDetection.cpp Explanation diff --git a/doc/tutorials/objdetect/traincascade.markdown b/doc/tutorials/objdetect/traincascade.markdown index 0dd4e41f46..167e34fe60 100644 --- a/doc/tutorials/objdetect/traincascade.markdown +++ b/doc/tutorials/objdetect/traincascade.markdown @@ -6,7 +6,7 @@ Introduction Working with a boosted cascade of weak classifiers includes two major stages: the training and the detection stage. The detection stage using either HAAR or LBP based models, is described in the @ref tutorial_cascade_classifier "object detection tutorial". This documentation gives an overview of the functionality needed to train your own boosted cascade of weak classifiers. The current guide will walk through all the different stages: collecting training data, preparation of the training data and executing the actual model training. -To support this tutorial, several official OpenCV applications will be used: [opencv_createsamples](https://github.com/opencv/opencv/tree/master/apps/createsamples), [opencv_annotation](https://github.com/opencv/opencv/tree/master/apps/annotation), [opencv_traincascade](https://github.com/opencv/opencv/tree/master/apps/traincascade) and [opencv_visualisation](https://github.com/opencv/opencv/tree/master/apps/visualisation). 
+To support this tutorial, several official OpenCV applications will be used: [opencv_createsamples](https://github.com/opencv/opencv/tree/3.4/apps/createsamples), [opencv_annotation](https://github.com/opencv/opencv/tree/3.4/apps/annotation), [opencv_traincascade](https://github.com/opencv/opencv/tree/3.4/apps/traincascade) and [opencv_visualisation](https://github.com/opencv/opencv/tree/3.4/apps/visualisation). ### Important notes diff --git a/doc/tutorials/stitching/stitcher/stitcher.markdown b/doc/tutorials/stitching/stitcher/stitcher.markdown index d28bd21d70..d34cac7048 100644 --- a/doc/tutorials/stitching/stitcher/stitcher.markdown +++ b/doc/tutorials/stitching/stitcher/stitcher.markdown @@ -15,7 +15,7 @@ Code ---- This tutorial code's is shown lines below. You can also download it from -[here](https://github.com/opencv/opencv/tree/master/samples/cpp/samples/cpp/stitching.cpp). +[here](https://github.com/opencv/opencv/tree/3.4/samples/cpp/samples/cpp/stitching.cpp). @include samples/cpp/stitching.cpp @@ -111,5 +111,5 @@ See also If you want to study internals of the stitching pipeline or you want to experiment with detailed configuration see -[stitching_detailed.cpp](https://github.com/opencv/opencv/tree/master/samples/cpp/stitching_detailed.cpp) +[stitching_detailed.cpp](https://github.com/opencv/opencv/tree/3.4/samples/cpp/stitching_detailed.cpp) in `opencv/samples/cpp` folder. diff --git a/doc/tutorials/video/background_subtraction/background_subtraction.markdown b/doc/tutorials/video/background_subtraction/background_subtraction.markdown index ed8bd84335..f914379f3e 100644 --- a/doc/tutorials/video/background_subtraction/background_subtraction.markdown +++ b/doc/tutorials/video/background_subtraction/background_subtraction.markdown @@ -43,7 +43,7 @@ file or a sequence of images. We will use @ref cv::BackgroundSubtractorMOG2 in this sample, to generate the foreground mask. The results as well as the input data are shown on the screen. 
-The source file can be downloaded [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/video/bg_sub.cpp). +The source file can be downloaded [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/video/bg_sub.cpp). @include samples/cpp/tutorial_code/video/bg_sub.cpp diff --git a/doc/tutorials/videoio/intelperc.markdown b/doc/tutorials/videoio/intelperc.markdown index a36511a978..932d78bd10 100644 --- a/doc/tutorials/videoio/intelperc.markdown +++ b/doc/tutorials/videoio/intelperc.markdown @@ -78,5 +78,5 @@ there are two flags that should be used to set/get property of the needed genera flag value is assumed by default if neither of the two possible values of the property is set. For more information please refer to the example of usage -[intelperc_capture.cpp](https://github.com/opencv/opencv/tree/master/samples/cpp/intelperc_capture.cpp) +[intelperc_capture.cpp](https://github.com/opencv/opencv/tree/3.4/samples/cpp/intelperc_capture.cpp) in opencv/samples/cpp folder. diff --git a/doc/tutorials/videoio/kinect_openni.markdown b/doc/tutorials/videoio/kinect_openni.markdown index 97fbd7ed2b..e8b7ad5b08 100644 --- a/doc/tutorials/videoio/kinect_openni.markdown +++ b/doc/tutorials/videoio/kinect_openni.markdown @@ -134,5 +134,5 @@ property. The following properties of cameras available through OpenNI interface - CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION For more information please refer to the example of usage -[openni_capture.cpp](https://github.com/opencv/opencv/tree/master/samples/cpp/openni_capture.cpp) in +[openni_capture.cpp](https://github.com/opencv/opencv/tree/3.4/samples/cpp/openni_capture.cpp) in opencv/samples/cpp folder. 
diff --git a/doc/tutorials/videoio/video-input-psnr-ssim/video_input_psnr_ssim.markdown b/doc/tutorials/videoio/video-input-psnr-ssim/video_input_psnr_ssim.markdown index c2a2304a98..a07736f88d 100644 --- a/doc/tutorials/videoio/video-input-psnr-ssim/video_input_psnr_ssim.markdown +++ b/doc/tutorials/videoio/video-input-psnr-ssim/video_input_psnr_ssim.markdown @@ -20,8 +20,8 @@ As a test case where to show off these using OpenCV I've created a small program video files and performs a similarity check between them. This is something you could use to check just how well a new video compressing algorithms works. Let there be a reference (original) video like [this small Megamind clip -](https://github.com/opencv/opencv/tree/master/samples/data/Megamind.avi) and [a compressed -version of it ](https://github.com/opencv/opencv/tree/master/samples/data/Megamind_bugy.avi). +](https://github.com/opencv/opencv/tree/3.4/samples/data/Megamind.avi) and [a compressed +version of it ](https://github.com/opencv/opencv/tree/3.4/samples/data/Megamind_bugy.avi). You may also find the source code and these video file in the `samples/data` folder of the OpenCV source library. diff --git a/doc/tutorials/videoio/video-write/video_write.markdown b/doc/tutorials/videoio/video-write/video_write.markdown index 9781a7075d..dff53c1f87 100644 --- a/doc/tutorials/videoio/video-write/video_write.markdown +++ b/doc/tutorials/videoio/video-write/video_write.markdown @@ -31,7 +31,7 @@ The source code You may also find the source code and these video file in the `samples/cpp/tutorial_code/videoio/video-write/` folder of the OpenCV source library or [download it -from here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/videoio/video-write/video-write.cpp). +from here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/videoio/video-write/video-write.cpp). 
@include cpp/tutorial_code/videoio/video-write/video-write.cpp diff --git a/doc/tutorials/viz/creating_widgets/creating_widgets.markdown b/doc/tutorials/viz/creating_widgets/creating_widgets.markdown index 4e4f6e5aed..3023e9b2d8 100644 --- a/doc/tutorials/viz/creating_widgets/creating_widgets.markdown +++ b/doc/tutorials/viz/creating_widgets/creating_widgets.markdown @@ -12,7 +12,7 @@ In this tutorial you will learn how to Code ---- -You can download the code from [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/viz/creating_widgets.cpp). +You can download the code from [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/viz/creating_widgets.cpp). @include samples/cpp/tutorial_code/viz/creating_widgets.cpp Explanation diff --git a/doc/tutorials/viz/histo3D/histo3D.markdown b/doc/tutorials/viz/histo3D/histo3D.markdown index fdf174cedd..10e9996aa4 100644 --- a/doc/tutorials/viz/histo3D/histo3D.markdown +++ b/doc/tutorials/viz/histo3D/histo3D.markdown @@ -12,7 +12,7 @@ In this tutorial you will learn how to Code ---- -You can download the code from [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/viz/histo3D.cpp). +You can download the code from [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/viz/histo3D.cpp). @include samples/cpp/tutorial_code/viz/histo3D.cpp Explanation diff --git a/doc/tutorials/viz/launching_viz/launching_viz.markdown b/doc/tutorials/viz/launching_viz/launching_viz.markdown index 07719c67eb..5dc6a85883 100644 --- a/doc/tutorials/viz/launching_viz/launching_viz.markdown +++ b/doc/tutorials/viz/launching_viz/launching_viz.markdown @@ -14,7 +14,7 @@ In this tutorial you will learn how to Code ---- -You can download the code from [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/viz/launching_viz.cpp). 
+You can download the code from [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/viz/launching_viz.cpp). @include samples/cpp/tutorial_code/viz/launching_viz.cpp Explanation diff --git a/doc/tutorials/viz/transformations/transformations.markdown b/doc/tutorials/viz/transformations/transformations.markdown index 512ce80bdb..c10b90bd66 100644 --- a/doc/tutorials/viz/transformations/transformations.markdown +++ b/doc/tutorials/viz/transformations/transformations.markdown @@ -13,7 +13,7 @@ In this tutorial you will learn how to Code ---- -You can download the code from [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/viz/transformations.cpp). +You can download the code from [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/viz/transformations.cpp). @include samples/cpp/tutorial_code/viz/transformations.cpp Explanation diff --git a/doc/tutorials/viz/widget_pose/widget_pose.markdown b/doc/tutorials/viz/widget_pose/widget_pose.markdown index 382ae98556..ea3b93e778 100644 --- a/doc/tutorials/viz/widget_pose/widget_pose.markdown +++ b/doc/tutorials/viz/widget_pose/widget_pose.markdown @@ -13,7 +13,7 @@ In this tutorial you will learn how to Code ---- -You can download the code from [here ](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/viz/widget_pose.cpp). +You can download the code from [here ](https://github.com/opencv/opencv/tree/3.4/samples/cpp/tutorial_code/viz/widget_pose.cpp). @include samples/cpp/tutorial_code/viz/widget_pose.cpp Explanation diff --git a/modules/core/doc/cuda.markdown b/modules/core/doc/cuda.markdown index ea85007a34..19b2dfc2d6 100644 --- a/modules/core/doc/cuda.markdown +++ b/modules/core/doc/cuda.markdown @@ -82,4 +82,4 @@ Block Matching algorithm has been successfully parallelized using the following 3. Merge the results into a single disparity map. 
With this algorithm, a dual GPU gave a 180% performance increase comparing to the single Fermi GPU. -For a source code example, see . +For a source code example, see . diff --git a/modules/highgui/include/opencv2/highgui.hpp b/modules/highgui/include/opencv2/highgui.hpp index 0394c7d70d..06cfd80ac3 100644 --- a/modules/highgui/include/opencv2/highgui.hpp +++ b/modules/highgui/include/opencv2/highgui.hpp @@ -456,7 +456,7 @@ CV_EXPORTS_W Rect getWindowImageRect(const String& winname); @param winname Name of the window. @param onMouse Mouse callback. See OpenCV samples, such as -, on how to specify and +, on how to specify and use the callback. @param userdata The optional parameter passed to the callback. */ diff --git a/modules/objdetect/include/opencv2/objdetect.hpp b/modules/objdetect/include/opencv2/objdetect.hpp index eac9cbaf58..8db0789e6c 100644 --- a/modules/objdetect/include/opencv2/objdetect.hpp +++ b/modules/objdetect/include/opencv2/objdetect.hpp @@ -91,7 +91,7 @@ compensate for the differences in the size of areas. The sums of pixel values ov regions are calculated rapidly using integral images (see below and the integral description). To see the object detector at work, have a look at the facedetect demo: - + The following reference is for the detection part only. There is a separate application called opencv_traincascade that can train a cascade of boosted classifiers from a set of samples. 
diff --git a/samples/cpp/facial_features.cpp b/samples/cpp/facial_features.cpp index 6dbef75c49..4fdfcb74d8 100644 --- a/samples/cpp/facial_features.cpp +++ b/samples/cpp/facial_features.cpp @@ -89,7 +89,7 @@ static void help() "\tThis will detect only the face in image.jpg.\n"; cout << " \n\nThe classifiers for face and eyes can be downloaded from : " - " \nhttps://github.com/opencv/opencv/tree/master/data/haarcascades"; + " \nhttps://github.com/opencv/opencv/tree/3.4/data/haarcascades"; cout << "\n\nThe classifiers for nose and mouth can be downloaded from : " " \nhttps://github.com/opencv/opencv_contrib/tree/master/modules/face/data/cascades\n"; diff --git a/samples/dnn/README.md b/samples/dnn/README.md index 9072ddb2a8..4b1bcb8d0b 100644 --- a/samples/dnn/README.md +++ b/samples/dnn/README.md @@ -7,7 +7,7 @@ | Model | Scale | Size WxH| Mean subtraction | Channels order | |---------------|-------|-----------|--------------------|-------| | [MobileNet-SSD, Caffe](https://github.com/chuanqi305/MobileNet-SSD/) | `0.00784 (2/255)` | `300x300` | `127.5 127.5 127.5` | BGR | -| [OpenCV face detector](https://github.com/opencv/opencv/tree/master/samples/dnn/face_detector) | `1.0` | `300x300` | `104 177 123` | BGR | +| [OpenCV face detector](https://github.com/opencv/opencv/tree/3.4/samples/dnn/face_detector) | `1.0` | `300x300` | `104 177 123` | BGR | | [SSDs from TensorFlow](https://github.com/tensorflow/models/tree/master/research/object_detection/) | `0.00784 (2/255)` | `300x300` | `127.5 127.5 127.5` | RGB | | [YOLO](https://pjreddie.com/darknet/yolo/) | `0.00392 (1/255)` | `416x416` | `0 0 0` | RGB | | [VGG16-SSD](https://github.com/weiliu89/caffe/tree/ssd) | `1.0` | `300x300` | `104 117 123` | BGR | @@ -17,7 +17,7 @@ | [Faster-RCNN, InceptionV2 backbone](https://github.com/tensorflow/models/tree/master/research/object_detection/) | `0.00784 (2/255)` | `300x300` | `127.5 127.5 127.5` | RGB | #### Face detection -[An origin 
model](https://github.com/opencv/opencv/tree/master/samples/dnn/face_detector) +[An origin model](https://github.com/opencv/opencv/tree/3.4/samples/dnn/face_detector) with single precision floating point weights has been quantized using [TensorFlow framework](https://www.tensorflow.org/). To achieve the best accuracy run the model on BGR images resized to `300x300` applying mean subtraction of values `(104, 177, 123)` for each blue, green and red channels correspondingly. @@ -60,4 +60,4 @@ AR @[ IoU=0.50:0.95 | area= large | maxDets=100 ] | 0.528 | 0.528 | * [Models downloading script](https://github.com/opencv/opencv_extra/blob/master/testdata/dnn/download_models.py) * [Configuration files adopted for OpenCV](https://github.com/opencv/opencv_extra/tree/master/testdata/dnn) * [How to import models from TensorFlow Object Detection API](https://github.com/opencv/opencv/wiki/TensorFlow-Object-Detection-API) -* [Names of classes from different datasets](https://github.com/opencv/opencv/tree/master/samples/data/dnn) +* [Names of classes from different datasets](https://github.com/opencv/opencv/tree/3.4/samples/data/dnn) From 799b4f48e7d86c1a92cfd17aa575a2ead6363392 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 16:53:44 +0300 Subject: [PATCH 19/40] fix missing precomp.hpp --- modules/dnn/src/tensorflow/tf_graph_simplifier.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/dnn/src/tensorflow/tf_graph_simplifier.cpp b/modules/dnn/src/tensorflow/tf_graph_simplifier.cpp index 9208588e65..a537358a1f 100644 --- a/modules/dnn/src/tensorflow/tf_graph_simplifier.cpp +++ b/modules/dnn/src/tensorflow/tf_graph_simplifier.cpp @@ -5,6 +5,8 @@ // Copyright (C) 2018, Intel Corporation, all rights reserved. // Third party copyrights are property of their respective owners. 
+#include "../precomp.hpp" + #ifdef HAVE_PROTOBUF #include "tf_graph_simplifier.hpp" From 229e8b1b18a372406f1df424b18a6dd04c2124fe Mon Sep 17 00:00:00 2001 From: take1014 Date: Thu, 31 May 2018 22:54:32 +0900 Subject: [PATCH 20/40] Correct handling when IPP is enabled --- modules/imgproc/src/histogram.cpp | 4 ++++ modules/imgproc/test/test_histograms.cpp | 30 ++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/modules/imgproc/src/histogram.cpp b/modules/imgproc/src/histogram.cpp index 9ff52e3bcc..f3ddeaf78c 100644 --- a/modules/imgproc/src/histogram.cpp +++ b/modules/imgproc/src/histogram.cpp @@ -821,6 +821,10 @@ static bool ipp_calchist(const Mat &image, Mat &hist, int histSize, const float* return false; #endif + // IPP_DISABLE_HISTOGRAM - https://github.com/opencv/opencv/issues/11544 + if (uniform && (ranges[0][1] - ranges[0][0]) != histSize) + return false; + Mat ihist = hist; if(accumulate) ihist.create(1, &histSize, CV_32S); diff --git a/modules/imgproc/test/test_histograms.cpp b/modules/imgproc/test/test_histograms.cpp index 10f74a3eb5..5386c29ac7 100644 --- a/modules/imgproc/test/test_histograms.cpp +++ b/modules/imgproc/test/test_histograms.cpp @@ -1918,5 +1918,35 @@ TEST(Imgproc_Hist_CalcBackProject, accuracy) { CV_CalcBackProjectTest test; test TEST(Imgproc_Hist_CalcBackProjectPatch, accuracy) { CV_CalcBackProjectPatchTest test; test.safe_run(); } TEST(Imgproc_Hist_BayesianProb, accuracy) { CV_BayesianProbTest test; test.safe_run(); } +TEST(Imgproc_Hist_Calc, calcHist_regression_11544) +{ + cv::Mat1w m = cv::Mat1w::zeros(10, 10); + int n_images = 1; + int channels[] = { 0 }; + cv::Mat mask; + cv::MatND hist1, hist2; + cv::MatND hist1_opt, hist2_opt; + int dims = 1; + int hist_size[] = { 1000 }; + float range1[] = { 0, 900 }; + float range2[] = { 0, 1000 }; + const float* ranges1[] = { range1 }; + const float* ranges2[] = { range2 }; + + setUseOptimized(false); + cv::calcHist(&m, n_images, channels, mask, hist1, dims, hist_size, 
ranges1); + cv::calcHist(&m, n_images, channels, mask, hist2, dims, hist_size, ranges2); + + setUseOptimized(true); + cv::calcHist(&m, n_images, channels, mask, hist1_opt, dims, hist_size, ranges1); + cv::calcHist(&m, n_images, channels, mask, hist2_opt, dims, hist_size, ranges2); + + for(int i = 0; i < 1000; i++) + { + EXPECT_EQ(hist1.at(i, 0), hist1_opt.at(i, 0)) << i; + EXPECT_EQ(hist2.at(i, 0), hist2_opt.at(i, 0)) << i; + } +} + }} // namespace /* End Of File */ From b934702c7fad821cd079050c5e94aa692c05c830 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 15:39:59 +0300 Subject: [PATCH 21/40] cmake: support find_package(FFMPEG) Use "OPENCV_FFMPEG_USE_FIND_PACKAGE" variable to specify find_package() parameters --- CMakeLists.txt | 4 +- cmake/OpenCVFindLibsVideo.cmake | 53 +++++++++------ modules/videoio/CMakeLists.txt | 11 ++- modules/videoio/src/cap_ffmpeg.cpp | 92 +++++++++++++++++--------- modules/videoio/src/cap_ffmpeg_api.hpp | 22 +++--- 5 files changed, 114 insertions(+), 68 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5190c5081f..c78abbc247 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1250,7 +1250,9 @@ if(WITH_1394 OR HAVE_DC1394) endif() if(WITH_FFMPEG OR HAVE_FFMPEG) - if(WIN32) + if(OPENCV_FFMPEG_USE_FIND_PACKAGE) + status(" FFMPEG:" HAVE_FFMPEG THEN "YES (find_package)" ELSE "NO (find_package)") + elseif(WIN32) status(" FFMPEG:" HAVE_FFMPEG THEN "YES (prebuilt binaries)" ELSE NO) else() status(" FFMPEG:" HAVE_FFMPEG THEN YES ELSE NO) diff --git a/cmake/OpenCVFindLibsVideo.cmake b/cmake/OpenCVFindLibsVideo.cmake index 9ed9ceb823..b9d15c38b8 100644 --- a/cmake/OpenCVFindLibsVideo.cmake +++ b/cmake/OpenCVFindLibsVideo.cmake @@ -212,12 +212,23 @@ endif(WITH_XIMEA) # --- FFMPEG --- ocv_clear_vars(HAVE_FFMPEG) -if(WITH_FFMPEG) - if(WIN32 AND NOT ARM) +if(WITH_FFMPEG) # try FFmpeg autodetection + if(OPENCV_FFMPEG_USE_FIND_PACKAGE) + if(OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "1" OR 
OPENCV_FFMPEG_USE_FIND_PACKAGE STREQUAL "ON") + set(OPENCV_FFMPEG_USE_FIND_PACKAGE "FFMPEG") + endif() + find_package(${OPENCV_FFMPEG_USE_FIND_PACKAGE}) # Required components: AVCODEC AVFORMAT AVUTIL SWSCALE + if(FFMPEG_FOUND OR FFmpeg_FOUND) + set(HAVE_FFMPEG TRUE) + else() + message(STATUS "Can't find FFmpeg via find_package(${OPENCV_FFMPEG_USE_FIND_PACKAGE})") + endif() + elseif(WIN32 AND NOT ARM AND NOT OPENCV_FFMPEG_SKIP_DOWNLOAD) include("${OpenCV_SOURCE_DIR}/3rdparty/ffmpeg/ffmpeg.cmake") download_win_ffmpeg(FFMPEG_CMAKE_SCRIPT) if(FFMPEG_CMAKE_SCRIPT) set(HAVE_FFMPEG TRUE) + set(HAVE_FFMPEG_WRAPPER 1) include("${FFMPEG_CMAKE_SCRIPT}") endif() elseif(PKG_CONFIG_FOUND) @@ -226,27 +237,29 @@ if(WITH_FFMPEG) if(FFMPEG_libavresample_FOUND) ocv_append_build_options(FFMPEG FFMPEG_libavresample) endif() - if(HAVE_FFMPEG) - try_compile(__VALID_FFMPEG - "${OpenCV_BINARY_DIR}" - "${OpenCV_SOURCE_DIR}/cmake/checks/ffmpeg_test.cpp" - CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIRS}" - "-DLINK_DIRECTORIES:STRING=${FFMPEG_LIBRARY_DIRS}" - "-DLINK_LIBRARIES:STRING=${FFMPEG_LIBRARIES}" - OUTPUT_VARIABLE TRY_OUT - ) - if(NOT __VALID_FFMPEG) - #message(FATAL_ERROR "FFMPEG: test check build log:\n${TRY_OUT}") - message(STATUS "WARNING: Can't build ffmpeg test code") - set(HAVE_FFMPEG FALSE) - else() - ocv_append_build_options(VIDEOIO FFMPEG) - endif() - endif() else() message(STATUS "Can't find ffmpeg - 'pkg-config' utility is missing") endif() -endif(WITH_FFMPEG) +endif() +if(HAVE_FFMPEG + AND NOT HAVE_FFMPEG_WRAPPER +) + try_compile(__VALID_FFMPEG + "${OpenCV_BINARY_DIR}" + "${OpenCV_SOURCE_DIR}/cmake/checks/ffmpeg_test.cpp" + CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIRS}" + "-DLINK_DIRECTORIES:STRING=${FFMPEG_LIBRARY_DIRS}" + "-DLINK_LIBRARIES:STRING=${FFMPEG_LIBRARIES}" + OUTPUT_VARIABLE TRY_OUT + ) + if(NOT __VALID_FFMPEG) + #message(FATAL_ERROR "FFMPEG: test check build log:\n${TRY_OUT}") + message(STATUS "WARNING: Can't build ffmpeg 
test code") + set(HAVE_FFMPEG FALSE) + else() + ocv_append_build_options(VIDEOIO FFMPEG) + endif() +endif() # --- VideoInput/DirectShow --- if(WITH_DSHOW) diff --git a/modules/videoio/CMakeLists.txt b/modules/videoio/CMakeLists.txt index c6fee91924..f5eba046c5 100644 --- a/modules/videoio/CMakeLists.txt +++ b/modules/videoio/CMakeLists.txt @@ -167,6 +167,9 @@ if(HAVE_FFMPEG) if(APPLE) list(APPEND VIDEOIO_LIBRARIES "-framework VideoDecodeAcceleration" bz2) endif() + if(HAVE_FFMPEG_WRAPPER) + add_definitions(-DHAVE_FFMPEG_WRAPPER=1) + endif() endif(HAVE_FFMPEG) if(HAVE_PVAPI) @@ -230,12 +233,6 @@ if(IOS) list(APPEND VIDEOIO_LIBRARIES "-framework Accelerate" "-framework AVFoundation" "-framework CoreGraphics" "-framework CoreImage" "-framework CoreMedia" "-framework CoreVideo" "-framework QuartzCore" "-framework UIKit") endif() -if(WIN32) - link_directories("${OpenCV_SOURCE_DIR}/3rdparty/lib") # for ffmpeg wrapper only - include_directories(AFTER SYSTEM "${OpenCV_SOURCE_DIR}/3rdparty/include") # for directshow in VS2005 and multi-monitor support on MinGW - include_directories(AFTER SYSTEM "${OpenCV_SOURCE_DIR}/3rdparty/include/ffmpeg_") # for tests -endif() - if(UNIX) #these variables are set by CHECK_MODULE macro foreach(P ${VIDEOIO_INCLUDE_DIRS}) @@ -268,7 +265,7 @@ endif() ocv_warnings_disable(CMAKE_CXX_FLAGS -Wno-deprecated-declarations) -if(WIN32 AND HAVE_FFMPEG) +if(WIN32 AND HAVE_FFMPEG_WRAPPER) #copy ffmpeg dll to the output folder if(MSVC64 OR MINGW64) set(FFMPEG_SUFFIX _64) diff --git a/modules/videoio/src/cap_ffmpeg.cpp b/modules/videoio/src/cap_ffmpeg.cpp index 25f7aa60b5..14353ad13c 100644 --- a/modules/videoio/src/cap_ffmpeg.cpp +++ b/modules/videoio/src/cap_ffmpeg.cpp @@ -41,13 +41,28 @@ #include "precomp.hpp" +#if defined(HAVE_FFMPEG) + #include -#if defined HAVE_FFMPEG && !defined _WIN32 +#if !defined(HAVE_FFMPEG_WRAPPER) #include "cap_ffmpeg_impl.hpp" + +#define icvCreateFileCapture_FFMPEG_p cvCreateFileCapture_FFMPEG +#define 
icvReleaseCapture_FFMPEG_p cvReleaseCapture_FFMPEG +#define icvGrabFrame_FFMPEG_p cvGrabFrame_FFMPEG +#define icvRetrieveFrame_FFMPEG_p cvRetrieveFrame_FFMPEG +#define icvSetCaptureProperty_FFMPEG_p cvSetCaptureProperty_FFMPEG +#define icvGetCaptureProperty_FFMPEG_p cvGetCaptureProperty_FFMPEG +#define icvCreateVideoWriter_FFMPEG_p cvCreateVideoWriter_FFMPEG +#define icvReleaseVideoWriter_FFMPEG_p cvReleaseVideoWriter_FFMPEG +#define icvWriteFrame_FFMPEG_p cvWriteFrame_FFMPEG + #else + #include "cap_ffmpeg_api.hpp" -#endif + +namespace cv { namespace { static CvCreateFileCapture_Plugin icvCreateFileCapture_FFMPEG_p = 0; static CvReleaseCapture_Plugin icvReleaseCapture_FFMPEG_p = 0; @@ -99,7 +114,7 @@ private: icvInitFFMPEG() { - #if defined _WIN32 +#if defined _WIN32 const wchar_t* module_name_ = L"opencv_ffmpeg" CVAUX_STRW(CV_MAJOR_VERSION) CVAUX_STRW(CV_MINOR_VERSION) CVAUX_STRW(CV_SUBMINOR_VERSION) #if (defined _MSC_VER && defined _M_X64) || (defined __GNUC__ && defined __x86_64__) @@ -161,7 +176,7 @@ private: (CvReleaseVideoWriter_Plugin)GetProcAddress(icvFFOpenCV, "cvReleaseVideoWriter_FFMPEG"); icvWriteFrame_FFMPEG_p = (CvWriteFrame_Plugin)GetProcAddress(icvFFOpenCV, "cvWriteFrame_FFMPEG"); - +# endif // _WIN32 #if 0 if( icvCreateFileCapture_FFMPEG_p != 0 && icvReleaseCapture_FFMPEG_p != 0 && @@ -181,21 +196,18 @@ private: } #endif } - #elif defined HAVE_FFMPEG - icvCreateFileCapture_FFMPEG_p = (CvCreateFileCapture_Plugin)cvCreateFileCapture_FFMPEG; - icvReleaseCapture_FFMPEG_p = (CvReleaseCapture_Plugin)cvReleaseCapture_FFMPEG; - icvGrabFrame_FFMPEG_p = (CvGrabFrame_Plugin)cvGrabFrame_FFMPEG; - icvRetrieveFrame_FFMPEG_p = (CvRetrieveFrame_Plugin)cvRetrieveFrame_FFMPEG; - icvSetCaptureProperty_FFMPEG_p = (CvSetCaptureProperty_Plugin)cvSetCaptureProperty_FFMPEG; - icvGetCaptureProperty_FFMPEG_p = (CvGetCaptureProperty_Plugin)cvGetCaptureProperty_FFMPEG; - icvCreateVideoWriter_FFMPEG_p = (CvCreateVideoWriter_Plugin)cvCreateVideoWriter_FFMPEG; - 
icvReleaseVideoWriter_FFMPEG_p = (CvReleaseVideoWriter_Plugin)cvReleaseVideoWriter_FFMPEG; - icvWriteFrame_FFMPEG_p = (CvWriteFrame_Plugin)cvWriteFrame_FFMPEG; - #endif } }; +}} // namespace +#endif // HAVE_FFMPEG_WRAPPER + + + +namespace cv { +namespace { + class CvCapture_FFMPEG_proxy CV_FINAL : public cv::IVideoCapture { public: @@ -228,19 +240,20 @@ public: } virtual bool open( const cv::String& filename ) { - icvInitFFMPEG::Init(); close(); - if( !icvCreateFileCapture_FFMPEG_p ) - return false; ffmpegCapture = icvCreateFileCapture_FFMPEG_p( filename.c_str() ); return ffmpegCapture != 0; } virtual void close() { - if( ffmpegCapture && icvReleaseCapture_FFMPEG_p ) + if (ffmpegCapture +#if defined(HAVE_FFMPEG_WRAPPER) + && icvReleaseCapture_FFMPEG_p +#endif +) icvReleaseCapture_FFMPEG_p( &ffmpegCapture ); - assert( ffmpegCapture == 0 ); + CV_Assert(ffmpegCapture == 0); ffmpegCapture = 0; } @@ -248,18 +261,26 @@ public: virtual int getCaptureDomain() CV_OVERRIDE { return CV_CAP_FFMPEG; } protected: - void* ffmpegCapture; + CvCapture_FFMPEG* ffmpegCapture; }; +} // namespace -cv::Ptr cv::cvCreateFileCapture_FFMPEG_proxy(const cv::String& filename) +cv::Ptr cvCreateFileCapture_FFMPEG_proxy(const cv::String& filename) { +#if defined(HAVE_FFMPEG_WRAPPER) + icvInitFFMPEG::Init(); + if (!icvCreateFileCapture_FFMPEG_p) + return cv::Ptr(); +#endif cv::Ptr capture = cv::makePtr(filename); if (capture && capture->isOpened()) return capture; return cv::Ptr(); } +namespace { + class CvVideoWriter_FFMPEG_proxy CV_FINAL : public cv::IVideoWriter { @@ -278,19 +299,20 @@ public: } virtual bool open( const cv::String& filename, int fourcc, double fps, cv::Size frameSize, bool isColor ) { - icvInitFFMPEG::Init(); close(); - if( !icvCreateVideoWriter_FFMPEG_p ) - return false; ffmpegWriter = icvCreateVideoWriter_FFMPEG_p( filename.c_str(), fourcc, fps, frameSize.width, frameSize.height, isColor ); return ffmpegWriter != 0; } virtual void close() { - if( ffmpegWriter && 
icvReleaseVideoWriter_FFMPEG_p ) + if (ffmpegWriter +#if defined(HAVE_FFMPEG_WRAPPER) + && icvReleaseVideoWriter_FFMPEG_p +#endif + ) icvReleaseVideoWriter_FFMPEG_p( &ffmpegWriter ); - assert( ffmpegWriter == 0 ); + CV_Assert(ffmpegWriter == 0); ffmpegWriter = 0; } @@ -299,15 +321,25 @@ public: virtual bool isOpened() const CV_OVERRIDE { return ffmpegWriter != 0; } protected: - void* ffmpegWriter; + CvVideoWriter_FFMPEG* ffmpegWriter; }; +} // namespace -cv::Ptr cv::cvCreateVideoWriter_FFMPEG_proxy(const cv::String& filename, int fourcc, - double fps, cv::Size frameSize, int isColor) +cv::Ptr cvCreateVideoWriter_FFMPEG_proxy(const cv::String& filename, int fourcc, + double fps, cv::Size frameSize, int isColor) { +#if defined(HAVE_FFMPEG_WRAPPER) + icvInitFFMPEG::Init(); + if (!icvCreateVideoWriter_FFMPEG_p) + return cv::Ptr(); +#endif cv::Ptr writer = cv::makePtr(filename, fourcc, fps, frameSize, isColor != 0); if (writer && writer->isOpened()) return writer; return cv::Ptr(); } + +} // namespace + +#endif // defined(HAVE_FFMPEG) diff --git a/modules/videoio/src/cap_ffmpeg_api.hpp b/modules/videoio/src/cap_ffmpeg_api.hpp index 7144f4ab9d..96bb8ee47c 100644 --- a/modules/videoio/src/cap_ffmpeg_api.hpp +++ b/modules/videoio/src/cap_ffmpeg_api.hpp @@ -28,6 +28,8 @@ enum CV_FFMPEG_CAP_PROP_SAR_DEN=41 }; +typedef struct CvCapture_FFMPEG CvCapture_FFMPEG; +typedef struct CvVideoWriter_FFMPEG CvVideoWriter_FFMPEG; OPENCV_FFMPEG_API struct CvCapture_FFMPEG* cvCreateFileCapture_FFMPEG(const char* filename); OPENCV_FFMPEG_API struct CvCapture_FFMPEG_2* cvCreateFileCapture_FFMPEG_2(const char* filename); @@ -55,19 +57,19 @@ OPENCV_FFMPEG_API int cvWriteFrame_FFMPEG(struct CvVideoWriter_FFMPEG* writer, c OPENCV_FFMPEG_API void cvReleaseVideoWriter_FFMPEG(struct CvVideoWriter_FFMPEG** writer); -typedef void* (*CvCreateFileCapture_Plugin)( const char* filename ); -typedef void* (*CvCreateCameraCapture_Plugin)( int index ); -typedef int (*CvGrabFrame_Plugin)( void* capture_handle 
); -typedef int (*CvRetrieveFrame_Plugin)( void* capture_handle, unsigned char** data, int* step, +typedef CvCapture_FFMPEG* (*CvCreateFileCapture_Plugin)( const char* filename ); +typedef CvCapture_FFMPEG* (*CvCreateCameraCapture_Plugin)( int index ); +typedef int (*CvGrabFrame_Plugin)( CvCapture_FFMPEG* capture_handle ); +typedef int (*CvRetrieveFrame_Plugin)( CvCapture_FFMPEG* capture_handle, unsigned char** data, int* step, int* width, int* height, int* cn ); -typedef int (*CvSetCaptureProperty_Plugin)( void* capture_handle, int prop_id, double value ); -typedef double (*CvGetCaptureProperty_Plugin)( void* capture_handle, int prop_id ); -typedef void (*CvReleaseCapture_Plugin)( void** capture_handle ); -typedef void* (*CvCreateVideoWriter_Plugin)( const char* filename, int fourcc, +typedef int (*CvSetCaptureProperty_Plugin)( CvCapture_FFMPEG* capture_handle, int prop_id, double value ); +typedef double (*CvGetCaptureProperty_Plugin)( CvCapture_FFMPEG* capture_handle, int prop_id ); +typedef void (*CvReleaseCapture_Plugin)( CvCapture_FFMPEG** capture_handle ); +typedef CvVideoWriter_FFMPEG* (*CvCreateVideoWriter_Plugin)( const char* filename, int fourcc, double fps, int width, int height, int iscolor ); -typedef int (*CvWriteFrame_Plugin)( void* writer_handle, const unsigned char* data, int step, +typedef int (*CvWriteFrame_Plugin)( CvVideoWriter_FFMPEG* writer_handle, const unsigned char* data, int step, int width, int height, int cn, int origin); -typedef void (*CvReleaseVideoWriter_Plugin)( void** writer ); +typedef void (*CvReleaseVideoWriter_Plugin)( CvVideoWriter_FFMPEG** writer ); /* * For CUDA encoder From 5d68c8dbe71b0bd5a4e0925286db6004b0b92a4a Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 18:54:43 +0300 Subject: [PATCH 22/40] videoio(test): fixup information dump --- modules/videoio/test/test_ffmpeg.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/videoio/test/test_ffmpeg.cpp 
b/modules/videoio/test/test_ffmpeg.cpp index c5e326a86a..e9e5e9aca1 100644 --- a/modules/videoio/test/test_ffmpeg.cpp +++ b/modules/videoio/test/test_ffmpeg.cpp @@ -357,7 +357,7 @@ public: for (unsigned int i = 0; i < frameCount && next; ++i) { - SCOPED_TRACE(cv::format("frame=%d", (int)frameCount)); + SCOPED_TRACE(cv::format("frame=%d/%d", (int)i, (int)frameCount)); Mat actual; (*capture) >> actual; From f1858024899b02854bb9adbed7303378ddd2eb48 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 19:30:56 +0300 Subject: [PATCH 23/40] documentation: avoid links to 'master' branch from 3.4 maintenance branch (2) Other links: - https://raw.githubusercontent.com/opencv/opencv/master - https://github.com/opencv/opencv/blob/master --- .../core/adding_images/adding_images.markdown | 8 ++++---- .../basic_geometric_drawing.markdown | 6 +++--- .../basic_linear_transform.markdown | 2 +- .../discrete_fourier_transform.markdown | 8 ++++---- .../how_to_use_OpenCV_parallel_for_.markdown | 4 ++-- .../mat-mask-operations/mat_mask_operations.markdown | 6 +++--- doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown | 2 +- doc/tutorials/dnn/dnn_yolo/dnn_yolo.markdown | 4 ++-- .../gausian_median_blur_bilateral_filter.markdown | 8 ++++---- doc/tutorials/imgproc/hitOrMiss/hitOrMiss.markdown | 6 +++--- .../imgtrans/copyMakeBorder/copyMakeBorder.markdown | 6 +++--- .../imgproc/imgtrans/filter_2d/filter_2d.markdown | 6 +++--- .../imgtrans/hough_circle/hough_circle.markdown | 10 +++++----- .../imgproc/imgtrans/hough_lines/hough_lines.markdown | 10 +++++----- .../laplace_operator/laplace_operator.markdown | 6 +++--- .../sobel_derivatives/sobel_derivatives.markdown | 6 +++--- .../imgproc/imgtrans/warp_affine/warp_affine.markdown | 6 +++--- .../morph_lines_detection/morph_lines_detection.md | 8 ++++---- doc/tutorials/imgproc/pyramids/pyramids.markdown | 8 ++++---- modules/dnn/test/pascal_semsegm_test_fcn.py | 4 ++-- samples/dnn/README.md | 2 +- 
samples/dnn/js_face_recognition.html | 2 +- 22 files changed, 64 insertions(+), 64 deletions(-) diff --git a/doc/tutorials/core/adding_images/adding_images.markdown b/doc/tutorials/core/adding_images/adding_images.markdown index c8776325a3..0da6d2d33e 100644 --- a/doc/tutorials/core/adding_images/adding_images.markdown +++ b/doc/tutorials/core/adding_images/adding_images.markdown @@ -33,19 +33,19 @@ Source Code @add_toggle_cpp Download the source code from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/core/AddingImages/AddingImages.cpp). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/core/AddingImages/AddingImages.cpp). @include cpp/tutorial_code/core/AddingImages/AddingImages.cpp @end_toggle @add_toggle_java Download the source code from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/core/AddingImages/AddingImages.java). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/core/AddingImages/AddingImages.java). @include java/tutorial_code/core/AddingImages/AddingImages.java @end_toggle @add_toggle_python Download the source code from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/core/AddingImages/adding_images.py). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/core/AddingImages/adding_images.py). @include python/tutorial_code/core/AddingImages/adding_images.py @end_toggle @@ -69,7 +69,7 @@ We need two source images (\f$f_{0}(x)\f$ and \f$f_{1}(x)\f$). 
So, we load them @snippet python/tutorial_code/core/AddingImages/adding_images.py load @end_toggle -We used the following images: [LinuxLogo.jpg](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/LinuxLogo.jpg) and [WindowsLogo.jpg](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/WindowsLogo.jpg) +We used the following images: [LinuxLogo.jpg](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/LinuxLogo.jpg) and [WindowsLogo.jpg](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/WindowsLogo.jpg) @warning Since we are *adding* *src1* and *src2*, they both have to be of the same size (width and height) and type. diff --git a/doc/tutorials/core/basic_geometric_drawing/basic_geometric_drawing.markdown b/doc/tutorials/core/basic_geometric_drawing/basic_geometric_drawing.markdown index ec3a2ffadc..02ec53cb3f 100644 --- a/doc/tutorials/core/basic_geometric_drawing/basic_geometric_drawing.markdown +++ b/doc/tutorials/core/basic_geometric_drawing/basic_geometric_drawing.markdown @@ -82,19 +82,19 @@ Code @add_toggle_cpp - This code is in your OpenCV sample folder. Otherwise you can grab it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/core/Matrix/Drawing_1.cpp) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/core/Matrix/Drawing_1.cpp) @include samples/cpp/tutorial_code/core/Matrix/Drawing_1.cpp @end_toggle @add_toggle_java - This code is in your OpenCV sample folder. 
Otherwise you can grab it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/core/BasicGeometricDrawing/BasicGeometricDrawing.java) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/core/BasicGeometricDrawing/BasicGeometricDrawing.java) @include samples/java/tutorial_code/core/BasicGeometricDrawing/BasicGeometricDrawing.java @end_toggle @add_toggle_python - This code is in your OpenCV sample folder. Otherwise you can grab it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/core/BasicGeometricDrawing/basic_geometric_drawing.py) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/core/BasicGeometricDrawing/basic_geometric_drawing.py) @include samples/python/tutorial_code/core/BasicGeometricDrawing/basic_geometric_drawing.py @end_toggle diff --git a/doc/tutorials/core/basic_linear_transform/basic_linear_transform.markdown b/doc/tutorials/core/basic_linear_transform/basic_linear_transform.markdown index a035199ee9..58d37c8570 100644 --- a/doc/tutorials/core/basic_linear_transform/basic_linear_transform.markdown +++ b/doc/tutorials/core/basic_linear_transform/basic_linear_transform.markdown @@ -185,7 +185,7 @@ and are not intended to be used as a replacement of a raster graphics editor!** ### Code -Code for the tutorial is [here](https://github.com/opencv/opencv/blob/master/samples/cpp/tutorial_code/ImgProc/changing_contrast_brightness_image/changing_contrast_brightness_image.cpp). +Code for the tutorial is [here](https://github.com/opencv/opencv/blob/3.4/samples/cpp/tutorial_code/ImgProc/changing_contrast_brightness_image/changing_contrast_brightness_image.cpp). 
Code for the gamma correction: @snippet changing_contrast_brightness_image.cpp changing-contrast-brightness-gamma-correction diff --git a/doc/tutorials/core/discrete_fourier_transform/discrete_fourier_transform.markdown b/doc/tutorials/core/discrete_fourier_transform/discrete_fourier_transform.markdown index 32536a5632..7facd74ca9 100644 --- a/doc/tutorials/core/discrete_fourier_transform/discrete_fourier_transform.markdown +++ b/doc/tutorials/core/discrete_fourier_transform/discrete_fourier_transform.markdown @@ -19,7 +19,7 @@ Source code @add_toggle_cpp You can [download this from here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.cpp) or +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.cpp) or find it in the `samples/cpp/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.cpp` of the OpenCV source code library. @@ -27,7 +27,7 @@ OpenCV source code library. @add_toggle_java You can [download this from here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/core/discrete_fourier_transform/DiscreteFourierTransform.java) or +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/core/discrete_fourier_transform/DiscreteFourierTransform.java) or find it in the `samples/java/tutorial_code/core/discrete_fourier_transform/DiscreteFourierTransform.java` of the OpenCV source code library. @@ -35,7 +35,7 @@ OpenCV source code library. 
@add_toggle_python You can [download this from here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.py) or +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.py) or find it in the `samples/python/tutorial_code/core/discrete_fourier_transform/discrete_fourier_transform.py` of the OpenCV source code library. @@ -222,7 +222,7 @@ An application idea would be to determine the geometrical orientation present in example, let us find out if a text is horizontal or not? Looking at some text you'll notice that the text lines sort of form also horizontal lines and the letters form sort of vertical lines. These two main components of a text snippet may be also seen in case of the Fourier transform. Let us use -[this horizontal ](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/imageTextN.png) and [this rotated](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/imageTextR.png) +[this horizontal ](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/imageTextN.png) and [this rotated](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/imageTextR.png) image about a text. In case of the horizontal text: diff --git a/doc/tutorials/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.markdown b/doc/tutorials/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.markdown index 18988a5535..c584a97c8a 100644 --- a/doc/tutorials/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.markdown +++ b/doc/tutorials/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.markdown @@ -7,7 +7,7 @@ Goal The goal of this tutorial is to show you how to use the OpenCV `parallel_for_` framework to easily parallelize your code. 
To illustrate the concept, we will write a program to draw a Mandelbrot set exploiting almost all the CPU load available. -The full tutorial code is [here](https://github.com/opencv/opencv/blob/master/samples/cpp/tutorial_code/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.cpp). +The full tutorial code is [here](https://github.com/opencv/opencv/blob/3.4/samples/cpp/tutorial_code/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.cpp). If you want more information about multithreading, you will have to refer to a reference book or course as this tutorial is intended to remain simple. @@ -175,7 +175,7 @@ C++ 11 standard allows to simplify the parallel implementation by get rid of the Results ----------- -You can find the full tutorial code [here](https://github.com/opencv/opencv/blob/master/samples/cpp/tutorial_code/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.cpp). +You can find the full tutorial code [here](https://github.com/opencv/opencv/blob/3.4/samples/cpp/tutorial_code/core/how_to_use_OpenCV_parallel_for_/how_to_use_OpenCV_parallel_for_.cpp). The performance of the parallel implementation depends of the type of CPU you have. For instance, on 4 cores / 8 threads CPU, you can expect a speed-up of around 6.9X. There are many factors to explain why we do not achieve a speed-up of almost 8X. 
Main reasons should be mostly due to: diff --git a/doc/tutorials/core/mat-mask-operations/mat_mask_operations.markdown b/doc/tutorials/core/mat-mask-operations/mat_mask_operations.markdown index 8e1febebff..bd74267f54 100644 --- a/doc/tutorials/core/mat-mask-operations/mat_mask_operations.markdown +++ b/doc/tutorials/core/mat-mask-operations/mat_mask_operations.markdown @@ -33,7 +33,7 @@ Code @add_toggle_cpp You can download this source code from [here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/core/mat_mask_operations/mat_mask_operations.cpp) or look in the +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/core/mat_mask_operations/mat_mask_operations.cpp) or look in the OpenCV source code libraries sample directory at `samples/cpp/tutorial_code/core/mat_mask_operations/mat_mask_operations.cpp`. @include samples/cpp/tutorial_code/core/mat_mask_operations/mat_mask_operations.cpp @@ -41,7 +41,7 @@ OpenCV source code libraries sample directory at @add_toggle_java You can download this source code from [here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/core/mat_mask_operations/MatMaskOperations.java) or look in the +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/core/mat_mask_operations/MatMaskOperations.java) or look in the OpenCV source code libraries sample directory at `samples/java/tutorial_code/core/mat_mask_operations/MatMaskOperations.java`. 
@include samples/java/tutorial_code/core/mat_mask_operations/MatMaskOperations.java @@ -49,7 +49,7 @@ OpenCV source code libraries sample directory at @add_toggle_python You can download this source code from [here -](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/core/mat_mask_operations/mat_mask_operations.py) or look in the +](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/core/mat_mask_operations/mat_mask_operations.py) or look in the OpenCV source code libraries sample directory at `samples/python/tutorial_code/core/mat_mask_operations/mat_mask_operations.py`. @include samples/python/tutorial_code/core/mat_mask_operations/mat_mask_operations.py diff --git a/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown b/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown index e65f70044b..1f26c37035 100644 --- a/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown +++ b/doc/tutorials/dnn/dnn_googlenet/dnn_googlenet.markdown @@ -13,7 +13,7 @@ We will demonstrate results of this example on the following picture. Source Code ----------- -We will be using snippets from the example application, that can be downloaded [here](https://github.com/opencv/opencv/blob/master/samples/dnn/classification.cpp). +We will be using snippets from the example application, that can be downloaded [here](https://github.com/opencv/opencv/blob/3.4/samples/dnn/classification.cpp). 
@include dnn/classification.cpp diff --git a/doc/tutorials/dnn/dnn_yolo/dnn_yolo.markdown b/doc/tutorials/dnn/dnn_yolo/dnn_yolo.markdown index 968b3faaca..76bf0edd25 100644 --- a/doc/tutorials/dnn/dnn_yolo/dnn_yolo.markdown +++ b/doc/tutorials/dnn/dnn_yolo/dnn_yolo.markdown @@ -19,8 +19,8 @@ Source Code ----------- Use a universal sample for object detection models written -[in C++](https://github.com/opencv/opencv/blob/master/samples/dnn/object_detection.cpp) and -[in Python](https://github.com/opencv/opencv/blob/master/samples/dnn/object_detection.py) languages +[in C++](https://github.com/opencv/opencv/blob/3.4/samples/dnn/object_detection.cpp) and +[in Python](https://github.com/opencv/opencv/blob/3.4/samples/dnn/object_detection.py) languages Usage examples -------------- diff --git a/doc/tutorials/imgproc/gausian_median_blur_bilateral_filter/gausian_median_blur_bilateral_filter.markdown b/doc/tutorials/imgproc/gausian_median_blur_bilateral_filter/gausian_median_blur_bilateral_filter.markdown index e8f678c57f..332d894062 100644 --- a/doc/tutorials/imgproc/gausian_median_blur_bilateral_filter/gausian_median_blur_bilateral_filter.markdown +++ b/doc/tutorials/imgproc/gausian_median_blur_bilateral_filter/gausian_median_blur_bilateral_filter.markdown @@ -97,7 +97,7 @@ Code @add_toggle_cpp - **Downloadable code**: Click - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) - **Code at glance:** @include samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp @@ -105,7 +105,7 @@ Code @add_toggle_java - **Downloadable code**: Click - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgProc/Smoothing/Smoothing.java) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgProc/Smoothing/Smoothing.java) - 
**Code at glance:** @include samples/java/tutorial_code/ImgProc/Smoothing/Smoothing.java @@ -113,7 +113,7 @@ Code @add_toggle_python - **Downloadable code**: Click - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py) - **Code at glance:** @include samples/python/tutorial_code/imgProc/Smoothing/smoothing.py @@ -220,7 +220,7 @@ already known by now. Results ------- -- The code opens an image (in this case [lena.jpg](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/lena.jpg)) +- The code opens an image (in this case [lena.jpg](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/lena.jpg)) and display it under the effects of the 4 filters explained. - Here is a snapshot of the image smoothed using *medianBlur*: diff --git a/doc/tutorials/imgproc/hitOrMiss/hitOrMiss.markdown b/doc/tutorials/imgproc/hitOrMiss/hitOrMiss.markdown index c55f09296f..efb1f232db 100644 --- a/doc/tutorials/imgproc/hitOrMiss/hitOrMiss.markdown +++ b/doc/tutorials/imgproc/hitOrMiss/hitOrMiss.markdown @@ -48,19 +48,19 @@ The code corresponding to the previous example is shown below. 
@add_toggle_cpp You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/HitMiss/HitMiss.cpp) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/HitMiss/HitMiss.cpp) @include samples/cpp/tutorial_code/ImgProc/HitMiss/HitMiss.cpp @end_toggle @add_toggle_java You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgProc/HitMiss/HitMiss.java) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgProc/HitMiss/HitMiss.java) @include samples/java/tutorial_code/ImgProc/HitMiss/HitMiss.java @end_toggle @add_toggle_python You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/imgProc/HitMiss/hit_miss.py) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/imgProc/HitMiss/hit_miss.py) @include samples/python/tutorial_code/imgProc/HitMiss/hit_miss.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/copyMakeBorder/copyMakeBorder.markdown b/doc/tutorials/imgproc/imgtrans/copyMakeBorder/copyMakeBorder.markdown index 8a4bbc0702..aba46bdab3 100644 --- a/doc/tutorials/imgproc/imgtrans/copyMakeBorder/copyMakeBorder.markdown +++ b/doc/tutorials/imgproc/imgtrans/copyMakeBorder/copyMakeBorder.markdown @@ -52,19 +52,19 @@ The tutorial code's is shown lines below. 
@add_toggle_cpp You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/copyMakeBorder_demo.cpp) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/copyMakeBorder_demo.cpp) @include samples/cpp/tutorial_code/ImgTrans/copyMakeBorder_demo.cpp @end_toggle @add_toggle_java You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/MakeBorder/CopyMakeBorder.java) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/MakeBorder/CopyMakeBorder.java) @include samples/java/tutorial_code/ImgTrans/MakeBorder/CopyMakeBorder.java @end_toggle @add_toggle_python You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/MakeBorder/copy_make_border.py) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/MakeBorder/copy_make_border.py) @include samples/python/tutorial_code/ImgTrans/MakeBorder/copy_make_border.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/filter_2d/filter_2d.markdown b/doc/tutorials/imgproc/imgtrans/filter_2d/filter_2d.markdown index 454f745177..3dea827a4b 100644 --- a/doc/tutorials/imgproc/imgtrans/filter_2d/filter_2d.markdown +++ b/doc/tutorials/imgproc/imgtrans/filter_2d/filter_2d.markdown @@ -68,19 +68,19 @@ The tutorial code's is shown in the lines below. 
@add_toggle_cpp You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/filter2D_demo.cpp) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/filter2D_demo.cpp) @include cpp/tutorial_code/ImgTrans/filter2D_demo.cpp @end_toggle @add_toggle_java You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/Filter2D/Filter2D_Demo.java) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/Filter2D/Filter2D_Demo.java) @include java/tutorial_code/ImgTrans/Filter2D/Filter2D_Demo.java @end_toggle @add_toggle_python You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/Filter2D/filter2D.py) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/Filter2D/filter2D.py) @include python/tutorial_code/ImgTrans/Filter2D/filter2D.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/hough_circle/hough_circle.markdown b/doc/tutorials/imgproc/imgtrans/hough_circle/hough_circle.markdown index fe2f88be15..c4a9989e2b 100644 --- a/doc/tutorials/imgproc/imgtrans/hough_circle/hough_circle.markdown +++ b/doc/tutorials/imgproc/imgtrans/hough_circle/hough_circle.markdown @@ -44,28 +44,28 @@ Code @add_toggle_cpp The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/houghcircles.cpp). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/houghcircles.cpp). A slightly fancier version (which shows trackbars for changing the threshold values) can be found -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/HoughCircle_Demo.cpp). 
+[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/HoughCircle_Demo.cpp). @include samples/cpp/tutorial_code/ImgTrans/houghcircles.cpp @end_toggle @add_toggle_java The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/HoughCircle/HoughCircles.java). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/HoughCircle/HoughCircles.java). @include samples/java/tutorial_code/ImgTrans/HoughCircle/HoughCircles.java @end_toggle @add_toggle_python The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/HoughCircle/hough_circle.py). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/HoughCircle/hough_circle.py). @include samples/python/tutorial_code/ImgTrans/HoughCircle/hough_circle.py @end_toggle Explanation ----------- -The image we used can be found [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/smarties.png) +The image we used can be found [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/smarties.png) #### Load an image: diff --git a/doc/tutorials/imgproc/imgtrans/hough_lines/hough_lines.markdown b/doc/tutorials/imgproc/imgtrans/hough_lines/hough_lines.markdown index 8b24d87a2d..d9687e2a1d 100644 --- a/doc/tutorials/imgproc/imgtrans/hough_lines/hough_lines.markdown +++ b/doc/tutorials/imgproc/imgtrans/hough_lines/hough_lines.markdown @@ -100,22 +100,22 @@ Code @add_toggle_cpp The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/houghlines.cpp). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/houghlines.cpp). 
A slightly fancier version (which shows both Hough standard and probabilistic with trackbars for changing the threshold values) can be found -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/HoughLines_Demo.cpp). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/HoughLines_Demo.cpp). @include samples/cpp/tutorial_code/ImgTrans/houghlines.cpp @end_toggle @add_toggle_java The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/HoughLine/HoughLines.java). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/HoughLine/HoughLines.java). @include samples/java/tutorial_code/ImgTrans/HoughLine/HoughLines.java @end_toggle @add_toggle_python The sample code that we will explain can be downloaded from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/HoughLine/hough_lines.py). +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/HoughLine/hough_lines.py). @include samples/python/tutorial_code/ImgTrans/HoughLine/hough_lines.py @end_toggle @@ -271,7 +271,7 @@ Result section. It still implements the same stuff as above, only adding the Trackbar for the Threshold. -Using an input image such as a [sudoku image](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/sudoku.png). +Using an input image such as a [sudoku image](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/sudoku.png). 
We get the following result by using the Standard Hough Line Transform: ![](images/hough_lines_result1.png) And by using the Probabilistic Hough Line Transform: diff --git a/doc/tutorials/imgproc/imgtrans/laplace_operator/laplace_operator.markdown b/doc/tutorials/imgproc/imgtrans/laplace_operator/laplace_operator.markdown index 63aed356b2..1ca525b5c6 100644 --- a/doc/tutorials/imgproc/imgtrans/laplace_operator/laplace_operator.markdown +++ b/doc/tutorials/imgproc/imgtrans/laplace_operator/laplace_operator.markdown @@ -55,19 +55,19 @@ Code @add_toggle_cpp -# The tutorial code's is shown lines below. You can also download it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/Laplace_Demo.cpp) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/Laplace_Demo.cpp) @include samples/cpp/tutorial_code/ImgTrans/Laplace_Demo.cpp @end_toggle @add_toggle_java -# The tutorial code's is shown lines below. You can also download it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/LaPlace/LaplaceDemo.java) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/LaPlace/LaplaceDemo.java) @include samples/java/tutorial_code/ImgTrans/LaPlace/LaplaceDemo.java @end_toggle @add_toggle_python -# The tutorial code's is shown lines below. 
You can also download it from - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/LaPlace/laplace_demo.py) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/LaPlace/laplace_demo.py) @include samples/python/tutorial_code/ImgTrans/LaPlace/laplace_demo.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/sobel_derivatives/sobel_derivatives.markdown b/doc/tutorials/imgproc/imgtrans/sobel_derivatives/sobel_derivatives.markdown index f8725d2a12..cf335ee8f2 100644 --- a/doc/tutorials/imgproc/imgtrans/sobel_derivatives/sobel_derivatives.markdown +++ b/doc/tutorials/imgproc/imgtrans/sobel_derivatives/sobel_derivatives.markdown @@ -114,19 +114,19 @@ Code @add_toggle_cpp You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgTrans/Sobel_Demo.cpp) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgTrans/Sobel_Demo.cpp) @include samples/cpp/tutorial_code/ImgTrans/Sobel_Demo.cpp @end_toggle @add_toggle_java You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgTrans/SobelDemo/SobelDemo.java) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgTrans/SobelDemo/SobelDemo.java) @include samples/java/tutorial_code/ImgTrans/SobelDemo/SobelDemo.java @end_toggle @add_toggle_python You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/SobelDemo/sobel_demo.py) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/SobelDemo/sobel_demo.py) @include samples/python/tutorial_code/ImgTrans/SobelDemo/sobel_demo.py @end_toggle diff --git a/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.markdown b/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.markdown 
index c72fc29b15..3f25509dba 100644 --- a/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.markdown +++ b/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.markdown @@ -90,19 +90,19 @@ Code @add_toggle_cpp - The tutorial's code is shown below. You can also download it - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) @include samples/cpp/tutorial_code/ImgTrans/Geometric_Transforms_Demo.cpp @end_toggle @add_toggle_java - The tutorial's code is shown below. You can also download it - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/Smoothing/Smoothing.cpp) @include samples/java/tutorial_code/ImgTrans/warp_affine/GeometricTransformsDemo.java @end_toggle @add_toggle_python - The tutorial's code is shown below. You can also download it - [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/ImgTrans/warp_affine/Geometric_Transforms_Demo.py) + [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/ImgTrans/warp_affine/Geometric_Transforms_Demo.py) @include samples/python/tutorial_code/ImgTrans/warp_affine/Geometric_Transforms_Demo.py @end_toggle diff --git a/doc/tutorials/imgproc/morph_lines_detection/morph_lines_detection.md b/doc/tutorials/imgproc/morph_lines_detection/morph_lines_detection.md index 4b0d3fae60..cf0e79755a 100644 --- a/doc/tutorials/imgproc/morph_lines_detection/morph_lines_detection.md +++ b/doc/tutorials/imgproc/morph_lines_detection/morph_lines_detection.md @@ -54,24 +54,24 @@ Code This tutorial code's is shown lines below. 
@add_toggle_cpp -You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.cpp). +You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.cpp). @include samples/cpp/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.cpp @end_toggle @add_toggle_java -You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.java). +You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.java). @include samples/java/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.java @end_toggle @add_toggle_python -You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/imgProc/morph_lines_detection/morph_lines_detection.py). +You can also download it from [here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/imgProc/morph_lines_detection/morph_lines_detection.py). @include samples/python/tutorial_code/imgProc/morph_lines_detection/morph_lines_detection.py @end_toggle Explanation / Result -------------------- -Get image from [here](https://raw.githubusercontent.com/opencv/opencv/master/doc/tutorials/imgproc/morph_lines_detection/images/src.png) . +Get image from [here](https://raw.githubusercontent.com/opencv/opencv/3.4/doc/tutorials/imgproc/morph_lines_detection/images/src.png) . 
#### Load Image diff --git a/doc/tutorials/imgproc/pyramids/pyramids.markdown b/doc/tutorials/imgproc/pyramids/pyramids.markdown index b832b22fb8..9b507e604e 100644 --- a/doc/tutorials/imgproc/pyramids/pyramids.markdown +++ b/doc/tutorials/imgproc/pyramids/pyramids.markdown @@ -72,19 +72,19 @@ This tutorial code's is shown lines below. @add_toggle_cpp You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/cpp/tutorial_code/ImgProc/Pyramids/Pyramids.cpp) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/cpp/tutorial_code/ImgProc/Pyramids/Pyramids.cpp) @include samples/cpp/tutorial_code/ImgProc/Pyramids/Pyramids.cpp @end_toggle @add_toggle_java You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/java/tutorial_code/ImgProc/Pyramids/Pyramids.java) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/java/tutorial_code/ImgProc/Pyramids/Pyramids.java) @include samples/java/tutorial_code/ImgProc/Pyramids/Pyramids.java @end_toggle @add_toggle_python You can also download it from -[here](https://raw.githubusercontent.com/opencv/opencv/master/samples/python/tutorial_code/imgProc/Pyramids/pyramids.py) +[here](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/python/tutorial_code/imgProc/Pyramids/pyramids.py) @include samples/python/tutorial_code/imgProc/Pyramids/pyramids.py @end_toggle @@ -184,7 +184,7 @@ Otherwise, an error will be shown. Results ------- -- The program calls by default an image [chicky_512.png](https://raw.githubusercontent.com/opencv/opencv/master/samples/data/chicky_512.png) +- The program calls by default an image [chicky_512.png](https://raw.githubusercontent.com/opencv/opencv/3.4/samples/data/chicky_512.png) that comes in the `samples/data` folder. Notice that this image is \f$512 \times 512\f$, hence a downsample won't generate any error (\f$512 = 2^{9}\f$). 
The original image is shown below: diff --git a/modules/dnn/test/pascal_semsegm_test_fcn.py b/modules/dnn/test/pascal_semsegm_test_fcn.py index af4bc2aede..0de7bf0ee3 100644 --- a/modules/dnn/test/pascal_semsegm_test_fcn.py +++ b/modules/dnn/test/pascal_semsegm_test_fcn.py @@ -205,9 +205,9 @@ if __name__ == "__main__": parser.add_argument("--val_names", help="path to file with validation set image names, download it here: " "https://github.com/shelhamer/fcn.berkeleyvision.org/blob/master/data/pascal/seg11valid.txt") parser.add_argument("--cls_file", help="path to file with colors for classes, download it here: " - "https://github.com/opencv/opencv/blob/master/samples/data/dnn/pascal-classes.txt") + "https://github.com/opencv/opencv/blob/3.4/samples/data/dnn/pascal-classes.txt") parser.add_argument("--prototxt", help="path to caffe prototxt, download it here: " - "https://github.com/opencv/opencv/blob/master/samples/data/dnn/fcn8s-heavy-pascal.prototxt") + "https://github.com/opencv/opencv/blob/3.4/samples/data/dnn/fcn8s-heavy-pascal.prototxt") parser.add_argument("--caffemodel", help="path to caffemodel file, download it here: " "http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel") parser.add_argument("--log", help="path to logging file") diff --git a/samples/dnn/README.md b/samples/dnn/README.md index 4b1bcb8d0b..94460b3c0c 100644 --- a/samples/dnn/README.md +++ b/samples/dnn/README.md @@ -24,7 +24,7 @@ of values `(104, 177, 123)` for each blue, green and red channels correspondingl The following are accuracy metrics obtained using [COCO object detection evaluation tool](http://cocodataset.org/#detections-eval) on [FDDB dataset](http://vis-www.cs.umass.edu/fddb/) -(see [script](https://github.com/opencv/opencv/blob/master/modules/dnn/misc/face_detector_accuracy.py)) +(see [script](https://github.com/opencv/opencv/blob/3.4/modules/dnn/misc/face_detector_accuracy.py)) applying resize to `300x300` and keeping an origin images' sizes. 
``` AP - Average Precision | FP32/FP16 | UINT8 | FP32/FP16 | UINT8 | diff --git a/samples/dnn/js_face_recognition.html b/samples/dnn/js_face_recognition.html index 887f5f1bd8..bc94783c90 100644 --- a/samples/dnn/js_face_recognition.html +++ b/samples/dnn/js_face_recognition.html @@ -69,7 +69,7 @@ function recognize(face) { function loadModels(callback) { var utils = new Utils(''); - var proto = 'https://raw.githubusercontent.com/opencv/opencv/master/samples/dnn/face_detector/deploy.prototxt'; + var proto = 'https://raw.githubusercontent.com/opencv/opencv/3.4/samples/dnn/face_detector/deploy.prototxt'; var weights = 'https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180205_fp16/res10_300x300_ssd_iter_140000_fp16.caffemodel'; var recognModel = 'https://raw.githubusercontent.com/pyannote/pyannote-data/master/openface.nn4.small2.v1.t7'; utils.createFileFromUrl('face_detector.prototxt', proto, () => { From 1bc96e3ed0568412707cf9536f60a4d9e10b999a Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 19:08:00 +0300 Subject: [PATCH 24/40] calib3d: cv::findContours() doesn't require to clone input --- modules/calib3d/src/calibinit.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/modules/calib3d/src/calibinit.cpp b/modules/calib3d/src/calibinit.cpp index 4c2d9508d1..7eade39b40 100644 --- a/modules/calib3d/src/calibinit.cpp +++ b/modules/calib3d/src/calibinit.cpp @@ -538,7 +538,11 @@ int cvFindChessboardCorners( const void* arr, CvSize pattern_size, int max_quad_buf_size = 0; cvFree(&quads); cvFree(&corners); +#ifdef USE_CV_FINDCONTOURS + Mat binarized_img = thresh_img_new; +#else Mat binarized_img = thresh_img_new.clone(); // make clone because cvFindContours modifies the source image +#endif int quad_count = icvGenerateQuads( &quads, &corners, storage, binarized_img, flags, &max_quad_buf_size ); PRINTF("Quad count: %d/%d\n", quad_count, (pattern_size.width/2+1)*(pattern_size.height/2+1)); SHOW_QUADS("New 
quads", thresh_img_new, quads, quad_count); @@ -604,7 +608,11 @@ int cvFindChessboardCorners( const void* arr, CvSize pattern_size, int max_quad_buf_size = 0; cvFree(&quads); cvFree(&corners); +#ifdef USE_CV_FINDCONTOURS + Mat binarized_img = thresh_img; +#else Mat binarized_img = (useAdaptive) ? thresh_img : thresh_img.clone(); // make clone because cvFindContours modifies the source image +#endif int quad_count = icvGenerateQuads( &quads, &corners, storage, binarized_img, flags, &max_quad_buf_size); PRINTF("Quad count: %d/%d\n", quad_count, (pattern_size.width/2+1)*(pattern_size.height/2+1)); SHOW_QUADS("Old quads", thresh_img, quads, quad_count); From 7d19bd6c19af51be7fd21c313a3b328f4c50cd05 Mon Sep 17 00:00:00 2001 From: Vadim Pisarevsky Date: Thu, 31 May 2018 19:36:39 +0300 Subject: [PATCH 25/40] Merge pull request #11634 from vpisarev:empty_mat_with_types_2 fixes handling of empty matrices in some functions (#11634) * a part of PR #11416 by Yuki Takehara * moved the empty mat check in Mat::copyTo() * fixed some test failures --- modules/calib3d/test/test_stereomatching.cpp | 2 +- modules/core/include/opencv2/core/mat.inl.hpp | 2 +- modules/core/src/arithm.cpp | 6 ++++++ modules/core/src/convert.cpp | 6 ++++++ modules/core/src/copy.cpp | 11 ++++++----- modules/features2d/src/agast.cpp | 6 ++++++ modules/features2d/src/fast.cpp | 6 ++++++ modules/features2d/src/gftt.cpp | 6 ++++++ modules/imgproc/src/hough.cpp | 8 ++++---- 9 files changed, 42 insertions(+), 11 deletions(-) diff --git a/modules/calib3d/test/test_stereomatching.cpp b/modules/calib3d/test/test_stereomatching.cpp index 601e0cb93b..c79f013ea5 100644 --- a/modules/calib3d/test/test_stereomatching.cpp +++ b/modules/calib3d/test/test_stereomatching.cpp @@ -556,7 +556,7 @@ int CV_StereoMatchingTest::processStereoMatchingResults( FileStorage& fs, int ca assert( fs.isOpened() ); assert( trueLeftDisp.type() == CV_32FC1 ); assert( trueRightDisp.empty() || trueRightDisp.type() == CV_32FC1 ); - assert( 
leftDisp.type() == CV_32FC1 && rightDisp.type() == CV_32FC1 ); + assert( leftDisp.type() == CV_32FC1 && (rightDisp.empty() || rightDisp.type() == CV_32FC1) ); // get masks for unknown ground truth disparity values Mat leftUnknMask, rightUnknMask; diff --git a/modules/core/include/opencv2/core/mat.inl.hpp b/modules/core/include/opencv2/core/mat.inl.hpp index a0c43f1534..98346f1bc0 100644 --- a/modules/core/include/opencv2/core/mat.inl.hpp +++ b/modules/core/include/opencv2/core/mat.inl.hpp @@ -1673,7 +1673,7 @@ Mat_<_Tp>& Mat_<_Tp>::operator = (const Mat& m) { return (*this = m.reshape(DataType<_Tp>::channels, m.dims, 0)); } - CV_DbgAssert(DataType<_Tp>::channels == m.channels()); + CV_Assert(DataType<_Tp>::channels == m.channels() || m.empty()); m.convertTo(*this, type()); return *this; } diff --git a/modules/core/src/arithm.cpp b/modules/core/src/arithm.cpp index 10f739c0c1..dbbe6895ed 100644 --- a/modules/core/src/arithm.cpp +++ b/modules/core/src/arithm.cpp @@ -1233,6 +1233,12 @@ void cv::compare(InputArray _src1, InputArray _src2, OutputArray _dst, int op) CV_Assert( op == CMP_LT || op == CMP_LE || op == CMP_EQ || op == CMP_NE || op == CMP_GE || op == CMP_GT ); + if(_src1.empty() && _src2.empty()) + { + _dst.release(); + return; + } + bool haveScalar = false; if ((_src1.isMatx() + _src2.isMatx()) == 1 diff --git a/modules/core/src/convert.cpp b/modules/core/src/convert.cpp index 0fb1199987..481b86b4f1 100644 --- a/modules/core/src/convert.cpp +++ b/modules/core/src/convert.cpp @@ -1304,6 +1304,12 @@ void cv::Mat::convertTo(OutputArray _dst, int _type, double alpha, double beta) { CV_INSTRUMENT_REGION() + if( empty() ) + { + _dst.release(); + return; + } + bool noScale = fabs(alpha-1) < DBL_EPSILON && fabs(beta) < DBL_EPSILON; if( _type < 0 ) diff --git a/modules/core/src/copy.cpp b/modules/core/src/copy.cpp index f4f18cb740..5e7f4a879a 100644 --- a/modules/core/src/copy.cpp +++ b/modules/core/src/copy.cpp @@ -246,13 +246,14 @@ void Mat::copyTo( OutputArray _dst 
) const return; } + if( empty() ) + { + _dst.release(); + return; + } + if( _dst.isUMat() ) { - if( empty() ) - { - _dst.release(); - return; - } _dst.create( dims, size.p, type() ); UMat dst = _dst.getUMat(); CV_Assert(dst.u != NULL); diff --git a/modules/features2d/src/agast.cpp b/modules/features2d/src/agast.cpp index ab01b67805..8b63234b29 100644 --- a/modules/features2d/src/agast.cpp +++ b/modules/features2d/src/agast.cpp @@ -7952,6 +7952,12 @@ public: { CV_INSTRUMENT_REGION() + if(_image.empty()) + { + keypoints.clear(); + return; + } + Mat mask = _mask.getMat(), grayImage; UMat ugrayImage; _InputArray gray = _image; diff --git a/modules/features2d/src/fast.cpp b/modules/features2d/src/fast.cpp index 106e5802cd..e34fa8f9d0 100644 --- a/modules/features2d/src/fast.cpp +++ b/modules/features2d/src/fast.cpp @@ -526,6 +526,12 @@ public: { CV_INSTRUMENT_REGION() + if(_image.empty()) + { + keypoints.clear(); + return; + } + Mat mask = _mask.getMat(), grayImage; UMat ugrayImage; _InputArray gray = _image; diff --git a/modules/features2d/src/gftt.cpp b/modules/features2d/src/gftt.cpp index 1b8010625b..e4a594a5c6 100644 --- a/modules/features2d/src/gftt.cpp +++ b/modules/features2d/src/gftt.cpp @@ -80,6 +80,12 @@ public: { CV_INSTRUMENT_REGION() + if(_image.empty()) + { + keypoints.clear(); + return; + } + std::vector corners; if (_image.isUMat()) diff --git a/modules/imgproc/src/hough.cpp b/modules/imgproc/src/hough.cpp index ec05edf888..1f61146193 100644 --- a/modules/imgproc/src/hough.cpp +++ b/modules/imgproc/src/hough.cpp @@ -803,7 +803,7 @@ static bool ocl_HoughLines(InputArray _src, OutputArray _lines, double rho, doub int total_points = counters.getMat(ACCESS_READ).at(0, 0); if (total_points <= 0) { - _lines.assign(UMat(0,0,CV_32FC2)); + _lines.release(); return true; } @@ -831,7 +831,7 @@ static bool ocl_HoughLines(InputArray _src, OutputArray _lines, double rho, doub if (total_lines > 0) _lines.assign(lines.rowRange(Range(0, total_lines))); else - 
_lines.assign(UMat(0,0,CV_32FC2)); + _lines.release(); return true; } @@ -857,7 +857,7 @@ static bool ocl_HoughLinesP(InputArray _src, OutputArray _lines, double rho, dou int total_points = counters.getMat(ACCESS_READ).at(0, 0); if (total_points <= 0) { - _lines.assign(UMat(0,0,CV_32SC4)); + _lines.release(); return true; } @@ -885,7 +885,7 @@ static bool ocl_HoughLinesP(InputArray _src, OutputArray _lines, double rho, dou if (total_lines > 0) _lines.assign(lines.rowRange(Range(0, total_lines))); else - _lines.assign(UMat(0,0,CV_32SC4)); + _lines.release(); return true; } From d734e83af0dcbfe4cd349e5c2ec93cbfc297ad9e Mon Sep 17 00:00:00 2001 From: Vadim Pisarevsky Date: Thu, 31 May 2018 21:59:45 +0300 Subject: [PATCH 26/40] Hsv2rgb univ intrin (#11637) * add universal intrinsics for HSV2RGB_b * rewritten HSV2RGB_b without using extra universal intrinsics * removed unused variable * undo changes in v_load_deinterleave --- modules/imgproc/src/color_hsv.cpp | 509 +++++++++++------------------- 1 file changed, 193 insertions(+), 316 deletions(-) diff --git a/modules/imgproc/src/color_hsv.cpp b/modules/imgproc/src/color_hsv.cpp index d5a41dfcec..94a36f1106 100644 --- a/modules/imgproc/src/color_hsv.cpp +++ b/modules/imgproc/src/color_hsv.cpp @@ -213,6 +213,91 @@ struct RGB2HSV_f }; +#if CV_SIMD128 +inline void HSV2RGB_simd(v_float32x4& v_h, v_float32x4& v_s, v_float32x4& v_v, float hscale) +{ + v_h = v_h * v_setall_f32(hscale); + v_float32x4 v_pre_sector = v_cvt_f32(v_trunc(v_h)); + v_h = v_h - v_pre_sector; + v_float32x4 v_tab0 = v_v; + v_float32x4 v_one = v_setall_f32(1.0f); + v_float32x4 v_tab1 = v_v * (v_one - v_s); + v_float32x4 v_tab2 = v_v * (v_one - (v_s * v_h)); + v_float32x4 v_tab3 = v_v * (v_one - (v_s * (v_one - v_h))); + + v_float32x4 v_one_sixth = v_setall_f32(1.0f / 6.0f); + v_float32x4 v_sector = v_pre_sector * v_one_sixth; + v_sector = v_cvt_f32(v_trunc(v_sector)); + v_float32x4 v_six = v_setall_f32(6.0f); + v_sector = v_pre_sector - (v_sector * v_six); 
+ + v_float32x4 v_two = v_setall_f32(2.0f); + v_h = v_tab1 & (v_sector < v_two); + v_h = v_h | (v_tab3 & (v_sector == v_two)); + v_float32x4 v_three = v_setall_f32(3.0f); + v_h = v_h | (v_tab0 & (v_sector == v_three)); + v_float32x4 v_four = v_setall_f32(4.0f); + v_h = v_h | (v_tab0 & (v_sector == v_four)); + v_h = v_h | (v_tab2 & (v_sector > v_four)); + + v_s = v_tab3 & (v_sector < v_one); + v_s = v_s | (v_tab0 & (v_sector == v_one)); + v_s = v_s | (v_tab0 & (v_sector == v_two)); + v_s = v_s | (v_tab2 & (v_sector == v_three)); + v_s = v_s | (v_tab1 & (v_sector > v_three)); + + v_v = v_tab0 & (v_sector < v_one); + v_v = v_v | (v_tab2 & (v_sector == v_one)); + v_v = v_v | (v_tab1 & (v_sector == v_two)); + v_v = v_v | (v_tab1 & (v_sector == v_three)); + v_v = v_v | (v_tab3 & (v_sector == v_four)); + v_v = v_v | (v_tab0 & (v_sector > v_four)); +} +#endif + + +inline void HSV2RGB_native(const float* src, float* dst, const float hscale, const int bidx) +{ + float h = src[0], s = src[1], v = src[2]; + float b, g, r; + + if( s == 0 ) + b = g = r = v; + else + { + static const int sector_data[][3]= + {{1,3,0}, {1,0,2}, {3,0,1}, {0,2,1}, {0,1,3}, {2,1,0}}; + float tab[4]; + int sector; + h *= hscale; + if( h < 0 ) + do h += 6; while( h < 0 ); + else if( h >= 6 ) + do h -= 6; while( h >= 6 ); + sector = cvFloor(h); + h -= sector; + if( (unsigned)sector >= 6u ) + { + sector = 0; + h = 0.f; + } + + tab[0] = v; + tab[1] = v*(1.f - s); + tab[2] = v*(1.f - s*h); + tab[3] = v*(1.f - s*(1.f - h)); + + b = tab[sector_data[sector][0]]; + g = tab[sector_data[sector][1]]; + r = tab[sector_data[sector][2]]; + } + + dst[bidx] = b; + dst[1] = g; + dst[bidx^2] = r; +} + + struct HSV2RGB_f { typedef float channel_type; @@ -224,152 +309,49 @@ struct HSV2RGB_f #endif } - #if CV_SIMD128 - inline void process(v_float32x4& v_h, v_float32x4& v_s, - v_float32x4& v_v, v_float32x4& v_scale) const - { - v_h = v_h * v_scale; - v_float32x4 v_pre_sector = v_cvt_f32(v_trunc(v_h)); - v_h = v_h - 
v_pre_sector; - v_float32x4 v_tab0 = v_v; - v_float32x4 v_one = v_setall_f32(1.0f); - v_float32x4 v_tab1 = v_v * (v_one - v_s); - v_float32x4 v_tab2 = v_v * (v_one - (v_s * v_h)); - v_float32x4 v_tab3 = v_v * (v_one - (v_s * (v_one - v_h))); - - v_float32x4 v_one_sixth = v_setall_f32(1.0f / 6.0f); - v_float32x4 v_sector = v_pre_sector * v_one_sixth; - v_sector = v_cvt_f32(v_trunc(v_sector)); - v_float32x4 v_six = v_setall_f32(6.0f); - v_sector = v_pre_sector - (v_sector * v_six); - - v_float32x4 v_two = v_setall_f32(2.0f); - v_h = v_tab1 & (v_sector < v_two); - v_h = v_h | (v_tab3 & (v_sector == v_two)); - v_float32x4 v_three = v_setall_f32(3.0f); - v_h = v_h | (v_tab0 & (v_sector == v_three)); - v_float32x4 v_four = v_setall_f32(4.0f); - v_h = v_h | (v_tab0 & (v_sector == v_four)); - v_h = v_h | (v_tab2 & (v_sector > v_four)); - - v_s = v_tab3 & (v_sector < v_one); - v_s = v_s | (v_tab0 & (v_sector == v_one)); - v_s = v_s | (v_tab0 & (v_sector == v_two)); - v_s = v_s | (v_tab2 & (v_sector == v_three)); - v_s = v_s | (v_tab1 & (v_sector > v_three)); - - v_v = v_tab0 & (v_sector < v_one); - v_v = v_v | (v_tab2 & (v_sector == v_one)); - v_v = v_v | (v_tab1 & (v_sector == v_two)); - v_v = v_v | (v_tab1 & (v_sector == v_three)); - v_v = v_v | (v_tab3 & (v_sector == v_four)); - v_v = v_v | (v_tab0 & (v_sector > v_four)); - } - #endif - void operator()(const float* src, float* dst, int n) const { int i = 0, bidx = blueIdx, dcn = dstcn; - float alpha = ColorChannel::max(); n *= 3; - #if CV_SIMD128 - if (hasSIMD) + if (dcn == 3) { - v_float32x4 v_scale = v_setall_f32(hscale); - if (dcn == 3) + #if CV_SIMD128 + if (hasSIMD) { - if (bidx) - { - for (; i <= n - 12; i += 12, dst += dcn * 4) - { - v_float32x4 v_h; - v_float32x4 v_s; - v_float32x4 v_v; - v_load_deinterleave(src + i, v_h, v_s, v_v); - process(v_h, v_s, v_v, v_scale); - v_store_interleave(dst, v_v, v_s, v_h); - } - } else { - for (; i <= n - 12; i += 12, dst += dcn * 4) - { - v_float32x4 v_h; - v_float32x4 v_s; - 
v_float32x4 v_v; - v_load_deinterleave(src + i, v_h, v_s, v_v); - process(v_h, v_s, v_v, v_scale); - v_store_interleave(dst, v_h, v_s, v_v); - } - } - } else { // dcn == 4 - v_float32x4 v_a = v_setall_f32(alpha); - if (bidx) + for (; i <= n - 12; i += 12, dst += dcn * 4) { - for (; i <= n - 12; i += 12, dst += dcn * 4) - { - v_float32x4 v_h; - v_float32x4 v_s; - v_float32x4 v_v; - v_load_deinterleave(src + i, v_h, v_s, v_v); - process(v_h, v_s, v_v, v_scale); - v_store_interleave(dst, v_v, v_s, v_h, v_a); - } - } else { - for (; i <= n - 12; i += 12, dst += dcn * 4) - { - v_float32x4 v_h; - v_float32x4 v_s; - v_float32x4 v_v; - v_load_deinterleave(src + i, v_h, v_s, v_v); - process(v_h, v_s, v_v, v_scale); - v_store_interleave(dst, v_h, v_s, v_v, v_a); - } + v_float32x4 v_src[3]; + v_load_deinterleave(src + i, v_src[0], v_src[1], v_src[2]); + HSV2RGB_simd(v_src[0], v_src[1], v_src[2], hscale); + v_store_interleave(dst, v_src[bidx], v_src[1], v_src[bidx^2]); } } - } - #endif - - for( ; i < n; i += 3, dst += dcn ) - { - float h = src[i], s = src[i+1], v = src[i+2]; - float b, g, r; - - if( s == 0 ) - b = g = r = v; - else + #endif + for( ; i < n; i += 3, dst += dcn ) { - static const int sector_data[][3]= - {{1,3,0}, {1,0,2}, {3,0,1}, {0,2,1}, {0,1,3}, {2,1,0}}; - float tab[4]; - int sector; - h *= hscale; - if( h < 0 ) - do h += 6; while( h < 0 ); - else if( h >= 6 ) - do h -= 6; while( h >= 6 ); - sector = cvFloor(h); - h -= sector; - if( (unsigned)sector >= 6u ) + HSV2RGB_native(src + i, dst, hscale, bidx); + } + } else { // dcn == 4 + float alpha = ColorChannel::max(); + #if CV_SIMD128 + if (hasSIMD) + { + for (; i <= n - 12; i += 12, dst += dcn * 4) { - sector = 0; - h = 0.f; + v_float32x4 v_src[3]; + v_load_deinterleave(src + i, v_src[0], v_src[1], v_src[2]); + HSV2RGB_simd(v_src[0], v_src[1], v_src[2], hscale); + v_float32x4 v_a = v_setall_f32(alpha); + v_store_interleave(dst, v_src[bidx], v_src[1], v_src[bidx^2], v_a); } - - tab[0] = v; - tab[1] = v*(1.f - 
s); - tab[2] = v*(1.f - s*h); - tab[3] = v*(1.f - s*(1.f - h)); - - b = tab[sector_data[sector][0]]; - g = tab[sector_data[sector][1]]; - r = tab[sector_data[sector][2]]; } - - dst[bidx] = b; - dst[1] = g; - dst[bidx^2] = r; - if( dcn == 4 ) + #endif + for( ; i < n; i += 3, dst += dcn ) + { + HSV2RGB_native(src + i, dst, hscale, bidx); dst[3] = alpha; + } } } @@ -386,216 +368,111 @@ struct HSV2RGB_b typedef uchar channel_type; HSV2RGB_b(int _dstcn, int _blueIdx, int _hrange) - : dstcn(_dstcn), cvt(3, _blueIdx, (float)_hrange) + : dstcn(_dstcn), blueIdx(_blueIdx), hscale(6.0f / _hrange) { - #if CV_NEON - v_scale_inv = vdupq_n_f32(1.f/255.f); - v_scale = vdupq_n_f32(255.f); - v_alpha = vdup_n_u8(ColorChannel::max()); - #elif CV_SSE2 - v_scale = _mm_set1_ps(255.0f); - v_alpha = _mm_set1_ps(ColorChannel::max()); - v_zero = _mm_setzero_si128(); - haveSIMD = checkHardwareSupport(CV_CPU_SSE2); + #if CV_SIMD128 + hasSIMD = hasSIMD128(); #endif } - #if CV_SSE2 - void process(__m128i v_r, __m128i v_g, __m128i v_b, - const __m128& v_coeffs_, - float * buf) const - { - __m128 v_r0 = _mm_cvtepi32_ps(_mm_unpacklo_epi16(v_r, v_zero)); - __m128 v_g0 = _mm_cvtepi32_ps(_mm_unpacklo_epi16(v_g, v_zero)); - __m128 v_b0 = _mm_cvtepi32_ps(_mm_unpacklo_epi16(v_b, v_zero)); - - __m128 v_r1 = _mm_cvtepi32_ps(_mm_unpackhi_epi16(v_r, v_zero)); - __m128 v_g1 = _mm_cvtepi32_ps(_mm_unpackhi_epi16(v_g, v_zero)); - __m128 v_b1 = _mm_cvtepi32_ps(_mm_unpackhi_epi16(v_b, v_zero)); - - __m128 v_coeffs = v_coeffs_; - - v_r0 = _mm_mul_ps(v_r0, v_coeffs); - v_g1 = _mm_mul_ps(v_g1, v_coeffs); - - v_coeffs = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(v_coeffs), 0x49)); - - v_r1 = _mm_mul_ps(v_r1, v_coeffs); - v_b0 = _mm_mul_ps(v_b0, v_coeffs); - - v_coeffs = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(v_coeffs), 0x49)); - - v_g0 = _mm_mul_ps(v_g0, v_coeffs); - v_b1 = _mm_mul_ps(v_b1, v_coeffs); - - _mm_store_ps(buf, v_r0); - _mm_store_ps(buf + 4, v_r1); - _mm_store_ps(buf + 8, v_g0); - 
_mm_store_ps(buf + 12, v_g1); - _mm_store_ps(buf + 16, v_b0); - _mm_store_ps(buf + 20, v_b1); - } - #endif - void operator()(const uchar* src, uchar* dst, int n) const { - int i, j, dcn = dstcn; + int j = 0, dcn = dstcn; uchar alpha = ColorChannel::max(); - float CV_DECL_ALIGNED(16) buf[3*BLOCK_SIZE]; - #if CV_SSE2 - __m128 v_coeffs = _mm_set_ps(1.f, 1.f/255.f, 1.f/255.f, 1.f); - #endif - for( i = 0; i < n; i += BLOCK_SIZE, src += BLOCK_SIZE*3 ) + #if CV_SIMD128 + if (hasSIMD) { - int dn = std::min(n - i, (int)BLOCK_SIZE); - j = 0; - - #if CV_NEON - for ( ; j <= (dn - 8) * 3; j += 24) - { - uint8x8x3_t v_src = vld3_u8(src + j); - uint16x8_t v_t0 = vmovl_u8(v_src.val[0]), - v_t1 = vmovl_u8(v_src.val[1]), - v_t2 = vmovl_u8(v_src.val[2]); - - float32x4x3_t v_dst; - v_dst.val[0] = vcvtq_f32_u32(vmovl_u16(vget_low_u16(v_t0))); - v_dst.val[1] = vmulq_f32(vcvtq_f32_u32(vmovl_u16(vget_low_u16(v_t1))), v_scale_inv); - v_dst.val[2] = vmulq_f32(vcvtq_f32_u32(vmovl_u16(vget_low_u16(v_t2))), v_scale_inv); - vst3q_f32(buf + j, v_dst); - - v_dst.val[0] = vcvtq_f32_u32(vmovl_u16(vget_high_u16(v_t0))); - v_dst.val[1] = vmulq_f32(vcvtq_f32_u32(vmovl_u16(vget_high_u16(v_t1))), v_scale_inv); - v_dst.val[2] = vmulq_f32(vcvtq_f32_u32(vmovl_u16(vget_high_u16(v_t2))), v_scale_inv); - vst3q_f32(buf + j + 12, v_dst); - } - #elif CV_SSE2 - if (haveSIMD) + for (j = 0; j <= (n - 16) * 3; j += 48, dst += dcn * 16) { - for ( ; j <= (dn - 8) * 3; j += 24) + v_uint8x16 h_b, s_b, v_b; + v_uint16x8 h_w[2], s_w[2], v_w[2]; + v_uint32x4 h_u[4], s_u[4], v_u[4]; + v_load_deinterleave(src + j, h_b, s_b, v_b); + v_expand(h_b, h_w[0], h_w[1]); + v_expand(s_b, s_w[0], s_w[1]); + v_expand(v_b, v_w[0], v_w[1]); + v_expand(h_w[0], h_u[0], h_u[1]); + v_expand(h_w[1], h_u[2], h_u[3]); + v_expand(s_w[0], s_u[0], s_u[1]); + v_expand(s_w[1], s_u[2], s_u[3]); + v_expand(v_w[0], v_u[0], v_u[1]); + v_expand(v_w[1], v_u[2], v_u[3]); + + v_int32x4 b_i[4], g_i[4], r_i[4]; + v_float32x4 v_coeff0 = v_setall_f32(1.0f / 
255.0f); + v_float32x4 v_coeff1 = v_setall_f32(255.0f); + + for( int k = 0; k < 4; k++ ) { - __m128i v_src0 = _mm_loadu_si128((__m128i const *)(src + j)); - __m128i v_src1 = _mm_loadl_epi64((__m128i const *)(src + j + 16)); - - process(_mm_unpacklo_epi8(v_src0, v_zero), - _mm_unpackhi_epi8(v_src0, v_zero), - _mm_unpacklo_epi8(v_src1, v_zero), - v_coeffs, - buf + j); + v_float32x4 v_src[3]; + v_src[0] = v_cvt_f32(v_reinterpret_as_s32(h_u[k])); + v_src[1] = v_cvt_f32(v_reinterpret_as_s32(s_u[k])); + v_src[2] = v_cvt_f32(v_reinterpret_as_s32(v_u[k])); + + v_src[1] *= v_coeff0; + v_src[2] *= v_coeff0; + HSV2RGB_simd(v_src[0], v_src[1], v_src[2], hscale); + + v_src[0] *= v_coeff1; + v_src[1] *= v_coeff1; + v_src[2] *= v_coeff1; + b_i[k] = v_trunc(v_src[0]); + g_i[k] = v_trunc(v_src[1]); + r_i[k] = v_trunc(v_src[2]); } - } - #endif - for( ; j < dn*3; j += 3 ) - { - buf[j] = src[j]; - buf[j+1] = src[j+1]*(1.f/255.f); - buf[j+2] = src[j+2]*(1.f/255.f); - } - cvt(buf, buf, dn); + v_uint16x8 r_w[2], g_w[2], b_w[2]; + v_uint8x16 r_b, g_b, b_b; - j = 0; - #if CV_NEON - for ( ; j <= (dn - 8) * 3; j += 24, dst += dcn * 8) - { - float32x4x3_t v_src0 = vld3q_f32(buf + j), v_src1 = vld3q_f32(buf + j + 12); - uint8x8_t v_dst0 = vqmovn_u16(vcombine_u16(vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src0.val[0], v_scale))), - vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src1.val[0], v_scale))))); - uint8x8_t v_dst1 = vqmovn_u16(vcombine_u16(vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src0.val[1], v_scale))), - vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src1.val[1], v_scale))))); - uint8x8_t v_dst2 = vqmovn_u16(vcombine_u16(vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src0.val[2], v_scale))), - vqmovn_u32(cv_vrndq_u32_f32(vmulq_f32(v_src1.val[2], v_scale))))); + r_w[0] = v_pack_u(r_i[0], r_i[1]); + r_w[1] = v_pack_u(r_i[2], r_i[3]); + r_b = v_pack(r_w[0], r_w[1]); + g_w[0] = v_pack_u(g_i[0], g_i[1]); + g_w[1] = v_pack_u(g_i[2], g_i[3]); + g_b = v_pack(g_w[0], g_w[1]); + b_w[0] = v_pack_u(b_i[0], b_i[1]); + 
b_w[1] = v_pack_u(b_i[2], b_i[3]); + b_b = v_pack(b_w[0], b_w[1]); - if (dcn == 4) + if( dcn == 3 ) { - uint8x8x4_t v_dst; - v_dst.val[0] = v_dst0; - v_dst.val[1] = v_dst1; - v_dst.val[2] = v_dst2; - v_dst.val[3] = v_alpha; - vst4_u8(dst, v_dst); + if( blueIdx == 0 ) + v_store_interleave(dst, b_b, g_b, r_b); + else + v_store_interleave(dst, r_b, g_b, b_b); } else { - uint8x8x3_t v_dst; - v_dst.val[0] = v_dst0; - v_dst.val[1] = v_dst1; - v_dst.val[2] = v_dst2; - vst3_u8(dst, v_dst); + v_uint8x16 alpha_b = v_setall_u8(alpha); + if( blueIdx == 0 ) + v_store_interleave(dst, b_b, g_b, r_b, alpha_b); + else + v_store_interleave(dst, r_b, g_b, b_b, alpha_b); } } - #elif CV_SSE2 - if (dcn == 3 && haveSIMD) - { - for ( ; j <= (dn * 3 - 16); j += 16, dst += 16) - { - __m128 v_src0 = _mm_mul_ps(_mm_load_ps(buf + j), v_scale); - __m128 v_src1 = _mm_mul_ps(_mm_load_ps(buf + j + 4), v_scale); - __m128 v_src2 = _mm_mul_ps(_mm_load_ps(buf + j + 8), v_scale); - __m128 v_src3 = _mm_mul_ps(_mm_load_ps(buf + j + 12), v_scale); - - __m128i v_dst0 = _mm_packs_epi32(_mm_cvtps_epi32(v_src0), - _mm_cvtps_epi32(v_src1)); - __m128i v_dst1 = _mm_packs_epi32(_mm_cvtps_epi32(v_src2), - _mm_cvtps_epi32(v_src3)); - - _mm_storeu_si128((__m128i *)dst, _mm_packus_epi16(v_dst0, v_dst1)); - } - - int jr = j % 3; - if (jr) - dst -= jr, j -= jr; - } - else if (dcn == 4 && haveSIMD) - { - for ( ; j <= (dn * 3 - 12); j += 12, dst += 16) - { - __m128 v_buf0 = _mm_mul_ps(_mm_load_ps(buf + j), v_scale); - __m128 v_buf1 = _mm_mul_ps(_mm_load_ps(buf + j + 4), v_scale); - __m128 v_buf2 = _mm_mul_ps(_mm_load_ps(buf + j + 8), v_scale); - - __m128 v_ba0 = _mm_unpackhi_ps(v_buf0, v_alpha); - __m128 v_ba1 = _mm_unpacklo_ps(v_buf2, v_alpha); - - __m128i v_src0 = _mm_cvtps_epi32(_mm_shuffle_ps(v_buf0, v_ba0, 0x44)); - __m128i v_src1 = _mm_shuffle_epi32(_mm_cvtps_epi32(_mm_shuffle_ps(v_ba0, v_buf1, 0x4e)), 0x78); - __m128i v_src2 = _mm_cvtps_epi32(_mm_shuffle_ps(v_buf1, v_ba1, 0x4e)); - __m128i v_src3 = 
_mm_shuffle_epi32(_mm_cvtps_epi32(_mm_shuffle_ps(v_ba1, v_buf2, 0xee)), 0x78); - - __m128i v_dst0 = _mm_packs_epi32(v_src0, v_src1); - __m128i v_dst1 = _mm_packs_epi32(v_src2, v_src3); - - _mm_storeu_si128((__m128i *)dst, _mm_packus_epi16(v_dst0, v_dst1)); - } - - int jr = j % 3; - if (jr) - dst -= jr, j -= jr; - } - #endif - - for( ; j < dn*3; j += 3, dst += dcn ) - { - dst[0] = saturate_cast(buf[j]*255.f); - dst[1] = saturate_cast(buf[j+1]*255.f); - dst[2] = saturate_cast(buf[j+2]*255.f); - if( dcn == 4 ) - dst[3] = alpha; - } + } + #endif + for( ; j < n * 3; j += 3, dst += dcn ) + { + float buf[6]; + buf[0] = src[j]; + buf[1] = src[j+1] * (1.0f / 255.0f); + buf[2] = src[j+2] * (1.0f / 255.0f); + HSV2RGB_native(buf, buf + 3, hscale, blueIdx); + dst[0] = saturate_cast(buf[3] * 255.0f); + dst[1] = saturate_cast(buf[4] * 255.0f); + dst[2] = saturate_cast(buf[5] * 255.0f); + if( dcn == 4 ) + dst[3] = alpha; } } int dstcn; - HSV2RGB_f cvt; - #if CV_NEON - float32x4_t v_scale, v_scale_inv; - uint8x8_t v_alpha; - #elif CV_SSE2 - __m128 v_scale; - __m128 v_alpha; - __m128i v_zero; - bool haveSIMD; + int blueIdx; + float hscale; + #if CV_SIMD128 + bool hasSIMD; #endif }; From dbab7ecac85b34b537bc30ddda875f6f8fc6ae15 Mon Sep 17 00:00:00 2001 From: Sancho McCann Date: Thu, 31 May 2018 21:09:26 +0000 Subject: [PATCH 27/40] Refactored out a dead code path. 
--- modules/calib3d/src/ptsetreg.cpp | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/modules/calib3d/src/ptsetreg.cpp b/modules/calib3d/src/ptsetreg.cpp index e26e67dc3e..1ed98756ec 100644 --- a/modules/calib3d/src/ptsetreg.cpp +++ b/modules/calib3d/src/ptsetreg.cpp @@ -78,10 +78,7 @@ class RANSACPointSetRegistrator : public PointSetRegistrator public: RANSACPointSetRegistrator(const Ptr& _cb=Ptr(), int _modelPoints=0, double _threshold=0, double _confidence=0.99, int _maxIters=1000) - : cb(_cb), modelPoints(_modelPoints), threshold(_threshold), confidence(_confidence), maxIters(_maxIters) - { - checkPartialSubsets = false; - } + : cb(_cb), modelPoints(_modelPoints), threshold(_threshold), confidence(_confidence), maxIters(_maxIters) {} int findInliers( const Mat& m1, const Mat& m2, const Mat& model, Mat& err, Mat& mask, double thresh ) const { @@ -143,17 +140,9 @@ public: ms1ptr[i*esz1 + k] = m1ptr[idx_i*esz1 + k]; for( k = 0; k < esz2; k++ ) ms2ptr[i*esz2 + k] = m2ptr[idx_i*esz2 + k]; - if( checkPartialSubsets && !cb->checkSubset( ms1, ms2, i+1 )) - { - // we may have selected some bad points; - // so, let's remove some of them randomly - i = rng.uniform(0, i+1); - iters++; - continue; - } i++; } - if( !checkPartialSubsets && i == modelPoints && !cb->checkSubset(ms1, ms2, i)) + if( i == modelPoints && !cb->checkSubset(ms1, ms2, i) ) continue; break; } @@ -261,7 +250,6 @@ public: Ptr cb; int modelPoints; - bool checkPartialSubsets; double threshold; double confidence; int maxIters; From 03edddba47fff5d2be18f9dcc6c0f0961d315190 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Fri, 1 Jun 2018 12:31:48 +0300 Subject: [PATCH 28/40] core: drop unnecessary duplicate check --- modules/core/include/opencv2/core/cvstd.inl.hpp | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/modules/core/include/opencv2/core/cvstd.inl.hpp b/modules/core/include/opencv2/core/cvstd.inl.hpp index f1637c44f0..631fe94e1a 
100644 --- a/modules/core/include/opencv2/core/cvstd.inl.hpp +++ b/modules/core/include/opencv2/core/cvstd.inl.hpp @@ -77,11 +77,8 @@ inline String::String(const std::string& str) : cstr_(0), len_(0) { - if (!str.empty()) - { - size_t len = str.size(); - if (len) memcpy(allocate(len), str.c_str(), len); - } + size_t len = str.size(); + if (len) memcpy(allocate(len), str.c_str(), len); } inline @@ -99,11 +96,8 @@ inline String& String::operator = (const std::string& str) { deallocate(); - if (!str.empty()) - { - size_t len = str.size(); - if (len) memcpy(allocate(len), str.c_str(), len); - } + size_t len = str.size(); + if (len) memcpy(allocate(len), str.c_str(), len); return *this; } From 90ba69a403f8fabf2074701fa489e7bdcfdcaf96 Mon Sep 17 00:00:00 2001 From: Vlad Kraevskiy Date: Fri, 1 Jun 2018 11:38:47 +0300 Subject: [PATCH 29/40] Fixed a typo in android toolchain documentation. --- platforms/android/android.toolchain.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/platforms/android/android.toolchain.cmake b/platforms/android/android.toolchain.cmake index e566301d81..b37dea01ae 100644 --- a/platforms/android/android.toolchain.cmake +++ b/platforms/android/android.toolchain.cmake @@ -157,7 +157,7 @@ # Silently degrades to gnustl_static if not available. # c++_static -> Use the LLVM libc++ runtime as a static library. # Implies -frtti -fexceptions. -# c++_shared -> Use the LLVM libc++ runtime as a static library. +# c++_shared -> Use the LLVM libc++ runtime as a shared library. # Implies -frtti -fno-exceptions. # # ANDROID_STL_FORCE_FEATURES=ON - turn rtti and exceptions support based on From bd7bad02a0b0c1c29fffb59c3d01882daa35fe5e Mon Sep 17 00:00:00 2001 From: Paul Jurczak Date: Fri, 1 Jun 2018 04:15:21 -0600 Subject: [PATCH 30/40] convertFp16 documentation edit (2) If this seems too wordy, take into account a new user who tries to find out extent of FP16 support in OpenCV. 
--- modules/core/include/opencv2/core.hpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/core/include/opencv2/core.hpp b/modules/core/include/opencv2/core.hpp index 216a8bcc92..43da5a9bbe 100644 --- a/modules/core/include/opencv2/core.hpp +++ b/modules/core/include/opencv2/core.hpp @@ -532,8 +532,9 @@ CV_EXPORTS_W void convertScaleAbs(InputArray src, OutputArray dst, /** @brief Converts an array to half precision floating number. -This function converts FP32 (single precision floating point) from/to FP16 (half precision floating point). The input array has to have type of CV_32F or -CV_16S to represent the bit depth. If the input array is neither of them, the function will raise an error. +This function converts FP32 (single precision floating point) from/to FP16 (half precision floating point). CV_16S format is used to represent FP16 data. +There are two use modes (src -> dst): CV_32F -> CV_16S and CV_16S -> CV_32F. The input array has to have type of CV_32F or +CV_16S to represent the bit depth. If the input array is neither of them, the function will raise an error. The format of half precision floating point is defined in IEEE 754-2008. @param src input array. 
From ab389142afbf137636ec15ee8c01fa119432968b Mon Sep 17 00:00:00 2001 From: Dmitry Kurtaev Date: Fri, 1 Jun 2018 14:10:32 +0300 Subject: [PATCH 31/40] Fix multiple networks with Intel's Inference Engine backend --- modules/dnn/src/op_inf_engine.cpp | 34 ++++++++++++++++++------------- modules/dnn/src/op_inf_engine.hpp | 5 ++++- modules/dnn/test/test_layers.cpp | 25 +++++++++++++++++++++++ 3 files changed, 49 insertions(+), 15 deletions(-) diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp index 710d6e5a88..43a65eb3ff 100644 --- a/modules/dnn/src/op_inf_engine.cpp +++ b/modules/dnn/src/op_inf_engine.cpp @@ -361,10 +361,20 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net) { CV_Assert(!isInitialized()); - InferenceEngine::StatusCode status; - InferenceEngine::ResponseDesc resp; + static std::map sharedPlugins; + std::string deviceName = InferenceEngine::getDeviceName(targetDevice); + auto pluginIt = sharedPlugins.find(deviceName); + if (pluginIt != sharedPlugins.end()) + { + enginePtr = pluginIt->second; + } + else + { + enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice); + sharedPlugins[deviceName] = enginePtr; + } + plugin = InferenceEngine::InferencePlugin(enginePtr); - plugin = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice); if (targetDevice == InferenceEngine::TargetDevice::eCPU) { #ifdef _WIN32 @@ -374,18 +384,17 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net) InferenceEngine::IExtensionPtr extension = InferenceEngine::make_so_pointer("libcpu_extension.so"); #endif // _WIN32 - status = plugin->AddExtension(extension, &resp); - if (status != InferenceEngine::StatusCode::OK) - CV_Error(Error::StsAssert, resp.msg); + plugin.AddExtension(extension); } - status = plugin->LoadNetwork(net, &resp); - if (status != InferenceEngine::StatusCode::OK) - CV_Error(Error::StsAssert, resp.msg); + netExec = plugin.LoadNetwork(net, {}); + 
infRequest = netExec.CreateInferRequest(); + infRequest.SetInput(inpBlobs); + infRequest.SetOutput(outBlobs); } bool InfEngineBackendNet::isInitialized() { - return (bool)plugin; + return (bool)enginePtr; } void InfEngineBackendNet::addBlobs(const std::vector >& ptrs) @@ -399,10 +408,7 @@ void InfEngineBackendNet::addBlobs(const std::vector >& ptrs void InfEngineBackendNet::forward() { - InferenceEngine::ResponseDesc resp; - InferenceEngine::StatusCode status = plugin->Infer(inpBlobs, outBlobs, &resp); - if (status != InferenceEngine::StatusCode::OK) - CV_Error(Error::StsAssert, resp.msg); + infRequest.Infer(); } Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob) diff --git a/modules/dnn/src/op_inf_engine.hpp b/modules/dnn/src/op_inf_engine.hpp index a61678cab2..075c1be849 100644 --- a/modules/dnn/src/op_inf_engine.hpp +++ b/modules/dnn/src/op_inf_engine.hpp @@ -89,7 +89,10 @@ private: InferenceEngine::BlobMap allBlobs; InferenceEngine::TargetDevice targetDevice; InferenceEngine::Precision precision; - InferenceEngine::InferenceEnginePluginPtr plugin; + InferenceEngine::InferenceEnginePluginPtr enginePtr; + InferenceEngine::InferencePlugin plugin; + InferenceEngine::ExecutableNetwork netExec; + InferenceEngine::InferRequest infRequest; void initPlugin(InferenceEngine::ICNNNetwork& net); }; diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp index 5cbfba5517..593864822c 100644 --- a/modules/dnn/test/test_layers.cpp +++ b/modules/dnn/test/test_layers.cpp @@ -887,6 +887,31 @@ TEST(Test_DLDT, fused_output) ASSERT_NO_THROW(net.forward()); LayerFactory::unregisterLayer("Unsupported"); } + +TEST(Test_DLDT, multiple_networks) +{ + Net nets[2]; + for (int i = 0; i < 2; ++i) + { + nets[i].setInputsNames(std::vector(1, format("input_%d", i))); + + LayerParams lp; + lp.set("kernel_size", 1); + lp.set("num_output", 1); + lp.set("bias_term", false); + lp.type = "Convolution"; + lp.name = format("testConv_%d", i); + 
lp.blobs.push_back(Mat({1, 1, 1, 1}, CV_32F, Scalar(1 + i))); + nets[i].addLayerToPrev(lp.name, lp.type, lp); + nets[i].setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE); + nets[i].setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1))); + } + Mat out_1 = nets[0].forward(); + Mat out_2 = nets[1].forward(); + // After the second model is initialized we try to receive an output from the first network again. + out_1 = nets[0].forward(); + normAssert(2 * out_1, out_2); +} #endif // HAVE_INF_ENGINE // Test a custom layer. From 4ec9afac57f159ec6138a0cdf04477406f5db51d Mon Sep 17 00:00:00 2001 From: take1014 Date: Fri, 1 Jun 2018 23:42:00 +0900 Subject: [PATCH 32/40] add imgproc snippets --- modules/imgproc/include/opencv2/imgproc.hpp | 224 +----------------- .../snippets/imgproc_HoughLinesCircles.cpp | 33 +++ .../snippets/imgproc_HoughLinesP.cpp | 31 +++ .../snippets/imgproc_applyColorMap.cpp | 32 +++ .../snippets/imgproc_calcHist.cpp | 55 +++++ .../snippets/imgproc_drawContours.cpp | 39 +++ 6 files changed, 195 insertions(+), 219 deletions(-) create mode 100644 samples/cpp/tutorial_code/snippets/imgproc_HoughLinesCircles.cpp create mode 100644 samples/cpp/tutorial_code/snippets/imgproc_HoughLinesP.cpp create mode 100644 samples/cpp/tutorial_code/snippets/imgproc_applyColorMap.cpp create mode 100644 samples/cpp/tutorial_code/snippets/imgproc_calcHist.cpp create mode 100644 samples/cpp/tutorial_code/snippets/imgproc_drawContours.cpp diff --git a/modules/imgproc/include/opencv2/imgproc.hpp b/modules/imgproc/include/opencv2/imgproc.hpp index a58b5b799b..c760a54787 100644 --- a/modules/imgproc/include/opencv2/imgproc.hpp +++ b/modules/imgproc/include/opencv2/imgproc.hpp @@ -159,40 +159,7 @@ In OpenCV you only need applyColorMap to apply a colormap on a given image. 
The code reads the path to an image from command line, applies a Jet colormap on it and shows the result: -@code -#include -#include -#include -#include -using namespace cv; - -#include -using namespace std; - -int main(int argc, const char *argv[]) -{ - // We need an input image. (can be grayscale or color) - if (argc < 2) - { - cerr << "We need an image to process here. Please run: colorMap [path_to_image]" << endl; - return -1; - } - Mat img_in = imread(argv[1]); - if(img_in.empty()) - { - cerr << "Sample image (" << argv[1] << ") is empty. Please adjust your path, so it points to a valid input image!" << endl; - return -1; - } - // Holds the colormap version of the image: - Mat img_color; - // Apply the colormap: - applyColorMap(img_in, img_color, COLORMAP_JET); - // Show the result: - imshow("colorMap", img_color); - waitKey(0); - return 0; -} -@endcode +@include snippets/imgproc_applyColorMap.cpp @see #ColormapTypes @@ -2007,58 +1974,7 @@ The function implements the probabilistic Hough transform algorithm for line det in @cite Matas00 See the line detection example below: - -@code - #include - #include - - using namespace cv; - using namespace std; - - int main(int argc, char** argv) - { - Mat src, dst, color_dst; - if( argc != 2 || !(src=imread(argv[1], 0)).data) - return -1; - - Canny( src, dst, 50, 200, 3 ); - cvtColor( dst, color_dst, COLOR_GRAY2BGR ); - - #if 0 - vector lines; - HoughLines( dst, lines, 1, CV_PI/180, 100 ); - - for( size_t i = 0; i < lines.size(); i++ ) - { - float rho = lines[i][0]; - float theta = lines[i][1]; - double a = cos(theta), b = sin(theta); - double x0 = a*rho, y0 = b*rho; - Point pt1(cvRound(x0 + 1000*(-b)), - cvRound(y0 + 1000*(a))); - Point pt2(cvRound(x0 - 1000*(-b)), - cvRound(y0 - 1000*(a))); - line( color_dst, pt1, pt2, Scalar(0,0,255), 3, 8 ); - } - #else - vector lines; - HoughLinesP( dst, lines, 1, CV_PI/180, 80, 30, 10 ); - for( size_t i = 0; i < lines.size(); i++ ) - { - line( color_dst, Point(lines[i][0], 
lines[i][1]), - Point(lines[i][2], lines[i][3]), Scalar(0,0,255), 3, 8 ); - } - #endif - namedWindow( "Source", 1 ); - imshow( "Source", src ); - - namedWindow( "Detected Lines", 1 ); - imshow( "Detected Lines", color_dst ); - - waitKey(0); - return 0; - } -@endcode +@include snippets/imgproc_HoughLinesP.cpp This is a sample picture the function parameters have been tuned for: ![image](pics/building.jpg) @@ -2114,41 +2030,7 @@ An example using the Hough circle detector The function finds circles in a grayscale image using a modification of the Hough transform. Example: : -@code - #include - #include - #include - - using namespace cv; - using namespace std; - - int main(int argc, char** argv) - { - Mat img, gray; - if( argc != 2 || !(img=imread(argv[1], 1)).data) - return -1; - cvtColor(img, gray, COLOR_BGR2GRAY); - // smooth it, otherwise a lot of false circles may be detected - GaussianBlur( gray, gray, Size(9, 9), 2, 2 ); - vector circles; - HoughCircles(gray, circles, HOUGH_GRADIENT, - 2, gray.rows/4, 200, 100 ); - for( size_t i = 0; i < circles.size(); i++ ) - { - Point center(cvRound(circles[i][0]), cvRound(circles[i][1])); - int radius = cvRound(circles[i][2]); - // draw the circle center - circle( img, center, 3, Scalar(0,255,0), -1, 8, 0 ); - // draw the circle outline - circle( img, center, radius, Scalar(0,0,255), 3, 8, 0 ); - } - namedWindow( "circles", 1 ); - imshow( "circles", img ); - - waitKey(0); - return 0; - } -@endcode +@include snippets/imgproc_HoughLinesCircles.cpp @note Usually the function detects the centers of circles well. However, it may fail to find correct radii. You can assist to the function by specifying the radius range ( minRadius and maxRadius ) if @@ -3247,63 +3129,7 @@ An example for creating histograms of an image The function cv::calcHist calculates the histogram of one or more arrays. The elements of a tuple used to increment a histogram bin are taken from the corresponding input arrays at the same location. 
The sample below shows how to compute a 2D Hue-Saturation histogram for a color image. : -@code - #include - #include - - using namespace cv; - - int main( int argc, char** argv ) - { - Mat src, hsv; - if( argc != 2 || !(src=imread(argv[1], 1)).data ) - return -1; - - cvtColor(src, hsv, COLOR_BGR2HSV); - - // Quantize the hue to 30 levels - // and the saturation to 32 levels - int hbins = 30, sbins = 32; - int histSize[] = {hbins, sbins}; - // hue varies from 0 to 179, see cvtColor - float hranges[] = { 0, 180 }; - // saturation varies from 0 (black-gray-white) to - // 255 (pure spectrum color) - float sranges[] = { 0, 256 }; - const float* ranges[] = { hranges, sranges }; - MatND hist; - // we compute the histogram from the 0-th and 1-st channels - int channels[] = {0, 1}; - - calcHist( &hsv, 1, channels, Mat(), // do not use mask - hist, 2, histSize, ranges, - true, // the histogram is uniform - false ); - double maxVal=0; - minMaxLoc(hist, 0, &maxVal, 0, 0); - - int scale = 10; - Mat histImg = Mat::zeros(sbins*scale, hbins*10, CV_8UC3); - - for( int h = 0; h < hbins; h++ ) - for( int s = 0; s < sbins; s++ ) - { - float binVal = hist.at(h, s); - int intensity = cvRound(binVal*255/maxVal); - rectangle( histImg, Point(h*scale, s*scale), - Point( (h+1)*scale - 1, (s+1)*scale - 1), - Scalar::all(intensity), - CV_FILLED ); - } - - namedWindow( "Source", 1 ); - imshow( "Source", src ); - - namedWindow( "H-S Histogram", 1 ); - imshow( "H-S Histogram", histImg ); - waitKey(); - } -@endcode +@include snippets/imgproc_calcHist.cpp @param images Source arrays. They all should have the same depth, CV_8U, CV_16U or CV_32F , and the same size. Each of them can have an arbitrary number of channels. @@ -4698,47 +4524,7 @@ An example using drawContours to clean up a background segmentation result The function draws contour outlines in the image if \f$\texttt{thickness} \ge 0\f$ or fills the area bounded by the contours if \f$\texttt{thickness}<0\f$ . 
The example below shows how to retrieve connected components from the binary image and label them: : -@code - #include "opencv2/imgproc.hpp" - #include "opencv2/highgui.hpp" - - using namespace cv; - using namespace std; - - int main( int argc, char** argv ) - { - Mat src; - // the first command-line parameter must be a filename of the binary - // (black-n-white) image - if( argc != 2 || !(src=imread(argv[1], 0)).data) - return -1; - - Mat dst = Mat::zeros(src.rows, src.cols, CV_8UC3); - - src = src > 1; - namedWindow( "Source", 1 ); - imshow( "Source", src ); - - vector > contours; - vector hierarchy; - - findContours( src, contours, hierarchy, - RETR_CCOMP, CHAIN_APPROX_SIMPLE ); - - // iterate through all the top-level contours, - // draw each connected component with its own random color - int idx = 0; - for( ; idx >= 0; idx = hierarchy[idx][0] ) - { - Scalar color( rand()&255, rand()&255, rand()&255 ); - drawContours( dst, contours, idx, color, FILLED, 8, hierarchy ); - } - - namedWindow( "Components", 1 ); - imshow( "Components", dst ); - waitKey(0); - } -@endcode +@include snippets/imgproc_drawContours.cpp @param image Destination image. @param contours All the input contours. Each contour is stored as a point vector. 
diff --git a/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesCircles.cpp b/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesCircles.cpp new file mode 100644 index 0000000000..289484dca3 --- /dev/null +++ b/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesCircles.cpp @@ -0,0 +1,33 @@ +#include +#include +#include + +using namespace cv; +using namespace std; + +int main(int argc, char** argv) +{ + Mat img, gray; + if( argc != 2 || !(img=imread(argv[1], 1)).data) + return -1; + cvtColor(img, gray, COLOR_BGR2GRAY); + // smooth it, otherwise a lot of false circles may be detected + GaussianBlur( gray, gray, Size(9, 9), 2, 2 ); + vector circles; + HoughCircles(gray, circles, HOUGH_GRADIENT, + 2, gray.rows/4, 200, 100 ); + for( size_t i = 0; i < circles.size(); i++ ) + { + Point center(cvRound(circles[i][0]), cvRound(circles[i][1])); + int radius = cvRound(circles[i][2]); + // draw the circle center + circle( img, center, 3, Scalar(0,255,0), -1, 8, 0 ); + // draw the circle outline + circle( img, center, radius, Scalar(0,0,255), 3, 8, 0 ); + } + namedWindow( "circles", 1 ); + imshow( "circles", img ); + + waitKey(0); + return 0; +} diff --git a/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesP.cpp b/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesP.cpp new file mode 100644 index 0000000000..e19d29abbb --- /dev/null +++ b/samples/cpp/tutorial_code/snippets/imgproc_HoughLinesP.cpp @@ -0,0 +1,31 @@ +#include +#include + +using namespace cv; +using namespace std; + +int main(int argc, char** argv) +{ + Mat src, dst, color_dst; + if( argc != 2 || !(src=imread(argv[1], 0)).data) + return -1; + + Canny( src, dst, 50, 200, 3 ); + cvtColor( dst, color_dst, COLOR_GRAY2BGR ); + + vector lines; + HoughLinesP( dst, lines, 1, CV_PI/180, 80, 30, 10 ); + for( size_t i = 0; i < lines.size(); i++ ) + { + line( color_dst, Point(lines[i][0], lines[i][1]), + Point( lines[i][2], lines[i][3]), Scalar(0,0,255), 3, 8 ); + } + namedWindow( "Source", 1 ); + imshow( "Source", 
src ); + + namedWindow( "Detected Lines", 1 ); + imshow( "Detected Lines", color_dst ); + + waitKey(0); + return 0; +} diff --git a/samples/cpp/tutorial_code/snippets/imgproc_applyColorMap.cpp b/samples/cpp/tutorial_code/snippets/imgproc_applyColorMap.cpp new file mode 100644 index 0000000000..280beba6cc --- /dev/null +++ b/samples/cpp/tutorial_code/snippets/imgproc_applyColorMap.cpp @@ -0,0 +1,32 @@ +#include +#include +#include +#include +using namespace cv; + +#include +using namespace std; + +int main(int argc, const char *argv[]) +{ + // We need an input image. (can be grayscale or color) + if (argc < 2) + { + cerr << "We need an image to process here. Please run: colorMap [path_to_image]" << endl; + return -1; + } + Mat img_in = imread(argv[1]); + if(img_in.empty()) + { + cerr << "Sample image (" << argv[1] << ") is empty. Please adjust your path, so it points to a valid input image!" << endl; + return -1; + } + // Holds the colormap version of the image: + Mat img_color; + // Apply the colormap: + applyColorMap(img_in, img_color, COLORMAP_JET); + // Show the result: + imshow("colorMap", img_color); + waitKey(0); + return 0; +} diff --git a/samples/cpp/tutorial_code/snippets/imgproc_calcHist.cpp b/samples/cpp/tutorial_code/snippets/imgproc_calcHist.cpp new file mode 100644 index 0000000000..9d1ca46033 --- /dev/null +++ b/samples/cpp/tutorial_code/snippets/imgproc_calcHist.cpp @@ -0,0 +1,55 @@ +#include +#include + +using namespace cv; + +int main( int argc, char** argv ) +{ + Mat src, hsv; + if( argc != 2 || !(src=imread(argv[1], 1)).data ) + return -1; + + cvtColor(src, hsv, COLOR_BGR2HSV); + + // Quantize the hue to 30 levels + // and the saturation to 32 levels + int hbins = 30, sbins = 32; + int histSize[] = {hbins, sbins}; + // hue varies from 0 to 179, see cvtColor + float hranges[] = { 0, 180 }; + // saturation varies from 0 (black-gray-white) to + // 255 (pure spectrum color) + float sranges[] = { 0, 256 }; + const float* ranges[] = { hranges, sranges 
}; + MatND hist; + // we compute the histogram from the 0-th and 1-st channels + int channels[] = {0, 1}; + + calcHist( &hsv, 1, channels, Mat(), // do not use mask + hist, 2, histSize, ranges, + true, // the histogram is uniform + false ); + double maxVal=0; + minMaxLoc(hist, 0, &maxVal, 0, 0); + + int scale = 10; + Mat histImg = Mat::zeros(sbins*scale, hbins*10, CV_8UC3); + + for( int h = 0; h < hbins; h++ ) + for( int s = 0; s < sbins; s++ ) + { + float binVal = hist.at(h, s); + int intensity = cvRound(binVal*255/maxVal); + rectangle( histImg, Point(h*scale, s*scale), + Point( (h+1)*scale - 1, (s+1)*scale - 1), + Scalar::all(intensity), + -1 ); + } + + namedWindow( "Source", 1 ); + imshow( "Source", src ); + + namedWindow( "H-S Histogram", 1 ); + imshow( "H-S Histogram", histImg ); + waitKey(); +} diff --git a/samples/cpp/tutorial_code/snippets/imgproc_drawContours.cpp b/samples/cpp/tutorial_code/snippets/imgproc_drawContours.cpp new file mode 100644 index 0000000000..4dfcde668e --- /dev/null +++ b/samples/cpp/tutorial_code/snippets/imgproc_drawContours.cpp @@ -0,0 +1,39 @@ +#include "opencv2/imgproc.hpp" +#include "opencv2/highgui.hpp" + +using namespace cv; +using namespace std; + +int main( int argc, char** argv ) +{ + Mat src; + // the first command-line parameter must be a filename of the binary + // (black-n-white) image + if( argc != 2 || !(src=imread(argv[1], 0)).data) + return -1; + + Mat dst = Mat::zeros(src.rows, src.cols, CV_8UC3); + + src = src > 1; + namedWindow( "Source", 1 ); + imshow( "Source", src ); + + vector > contours; + vector hierarchy; + + findContours( src, contours, hierarchy, + RETR_CCOMP, CHAIN_APPROX_SIMPLE ); + + // iterate through all the top-level contours, + // draw each connected component with its own random color + int idx = 0; + for( ; idx >= 0; idx = hierarchy[idx][0] ) + { + Scalar color( rand()&255, rand()&255, rand()&255 ); + drawContours( dst, contours, idx, color, FILLED, 8, hierarchy ); + } + + namedWindow( 
"Components", 1 ); + imshow( "Components", dst ); + waitKey(0); +} From be8f5bb420e34f6923097771862e4f251236864e Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Sat, 2 Jun 2018 08:16:31 +0000 Subject: [PATCH 33/40] cmake: avoid double variable expand --- CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5190c5081f..8358353399 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -340,8 +340,8 @@ OCV_OPTION(CV_ENABLE_INTRINSICS "Use intrinsic-based optimized code" ON ) OCV_OPTION(CV_DISABLE_OPTIMIZATION "Disable explicit optimized code (dispatched code/intrinsics/loop unrolling/etc)" OFF ) OCV_OPTION(CV_TRACE "Enable OpenCV code trace" ON) -OCV_OPTION(ENABLE_PYLINT "Add target with Pylint checks" (${BUILD_DOCS} OR ${BUILD_EXAMPLES}) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) ) -OCV_OPTION(ENABLE_FLAKE8 "Add target with Python flake8 checker" (${BUILD_DOCS} OR ${BUILD_EXAMPLES}) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) ) +OCV_OPTION(ENABLE_PYLINT "Add target with Pylint checks" (BUILD_DOCS OR BUILD_EXAMPLES) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) ) +OCV_OPTION(ENABLE_FLAKE8 "Add target with Python flake8 checker" (BUILD_DOCS OR BUILD_EXAMPLES) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) ) if(ENABLE_IMPL_COLLECTION) add_definitions(-DCV_COLLECT_IMPL_DATA) From e657e170ebc7e332e873d71adacd8adbb2ae8d8d Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Sat, 2 Jun 2018 08:27:34 +0000 Subject: [PATCH 34/40] cmake: ENABLE_PRECOMPILED_HEADERS for MSVC cross-compiling MSVC ARM / WinRT builds --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5190c5081f..7ba8cc1665 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -316,7 +316,7 @@ OCV_OPTION(INSTALL_TESTS "Install accuracy and performance test binar # OpenCV build options # =================================================== 
OCV_OPTION(ENABLE_CCACHE "Use ccache" (UNIX AND NOT IOS AND (CMAKE_GENERATOR MATCHES "Makefile" OR CMAKE_GENERATOR MATCHES "Ninja")) ) -OCV_OPTION(ENABLE_PRECOMPILED_HEADERS "Use precompiled headers" ON IF (NOT IOS AND NOT CMAKE_CROSSCOMPILING) ) +OCV_OPTION(ENABLE_PRECOMPILED_HEADERS "Use precompiled headers" ON IF (MSVC OR (NOT IOS AND NOT CMAKE_CROSSCOMPILING) ) ) OCV_OPTION(ENABLE_SOLUTION_FOLDERS "Solution folder in Visual Studio or in other IDEs" (MSVC_IDE OR CMAKE_GENERATOR MATCHES Xcode) ) OCV_OPTION(ENABLE_PROFILING "Enable profiling in the GCC compiler (Add flags: -g -pg)" OFF IF CV_GCC ) OCV_OPTION(ENABLE_COVERAGE "Enable coverage collection with GCov" OFF IF CV_GCC ) From ec42d872043f07ed194383703ded05ed6c0a5f62 Mon Sep 17 00:00:00 2001 From: Kuang Fangjun Date: Sat, 2 Jun 2018 19:16:56 +0800 Subject: [PATCH 35/40] fix #11673. --- modules/viz/src/shapes.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/viz/src/shapes.cpp b/modules/viz/src/shapes.cpp index 1dfa7c38e1..30fde1cc5c 100644 --- a/modules/viz/src/shapes.cpp +++ b/modules/viz/src/shapes.cpp @@ -528,7 +528,7 @@ void cv::viz::WText3D::setText(const String &text) cv::String cv::viz::WText3D::getText() const { - vtkFollower *actor = vtkFollower::SafeDownCast(WidgetAccessor::getProp(*this)); + vtkActor *actor = vtkActor::SafeDownCast(WidgetAccessor::getProp(*this)); CV_Assert("This widget does not support text." && actor); vtkPolyDataMapper *mapper = vtkPolyDataMapper::SafeDownCast(actor->GetMapper()); From 9ae28415ec6c9a720f1e1ad3fd9333d58cdeb012 Mon Sep 17 00:00:00 2001 From: Kuang Fangjun Date: Sun, 3 Jun 2018 07:21:08 +0800 Subject: [PATCH 36/40] fix doc. 
--- CMakeLists.txt | 10 +++++----- cmake/FindCUDA.cmake | 12 ++++++------ cmake/OpenCVCompilerOptions.cmake | 2 +- cmake/OpenCVFindLibsPerf.cmake | 2 +- cmake/OpenCVModule.cmake | 6 +++--- cmake/OpenCVPCHSupport.cmake | 2 +- .../viz/launching_viz/launching_viz.markdown | 2 +- modules/dnn/include/opencv2/dnn.hpp | 6 +++--- modules/dnn/include/opencv2/dnn/all_layers.hpp | 16 ++++++++-------- modules/dnn/include/opencv2/dnn/dnn.hpp | 16 ++++++++-------- modules/dnn/misc/quantize_face_detector.py | 4 ++-- modules/dnn/src/dnn.cpp | 6 +++--- modules/dnn/src/halide_scheduler.cpp | 2 +- modules/dnn/src/layers/convolution_layer.cpp | 2 +- .../dnn/src/layers/detection_output_layer.cpp | 2 +- modules/dnn/src/layers/eltwise_layer.cpp | 2 +- modules/dnn/src/layers/prior_box_layer.cpp | 6 +++--- .../dnn/src/ocl4dnn/src/ocl4dnn_conv_spatial.cpp | 2 +- modules/dnn/src/op_inf_engine.cpp | 4 ++-- modules/dnn/src/tensorflow/graph.proto | 2 +- modules/dnn/src/torch/torch_importer.cpp | 6 +++--- modules/dnn/test/test_darknet_importer.cpp | 4 ++-- modules/dnn/test/test_torch_importer.cpp | 2 +- modules/viz/CMakeLists.txt | 2 +- modules/viz/include/opencv2/viz/widgets.hpp | 2 +- 25 files changed, 61 insertions(+), 61 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5190c5081f..4c0a2a848f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -32,7 +32,7 @@ endif() option(ENABLE_PIC "Generate position independent code (necessary for shared libraries)" TRUE) set(CMAKE_POSITION_INDEPENDENT_CODE ${ENABLE_PIC}) -# Following block can break build in case of cross-compilng +# Following block can break build in case of cross-compiling # but CMAKE_CROSSCOMPILING variable will be set only on project(OpenCV) command # so we will try to detect cross-compiling by the presence of CMAKE_TOOLCHAIN_FILE if(NOT DEFINED CMAKE_INSTALL_PREFIX) @@ -43,17 +43,17 @@ if(NOT DEFINED CMAKE_INSTALL_PREFIX) else() set(CMAKE_INSTALL_PREFIX "/usr/local" CACHE PATH "Installation Directory") endif() - 
else(NOT CMAKE_TOOLCHAIN_FILE) + else() #Android: set output folder to ${CMAKE_BINARY_DIR} - set( LIBRARY_OUTPUT_PATH_ROOT ${CMAKE_BINARY_DIR} CACHE PATH "root for library output, set this to change where android libs are compiled to" ) + set(LIBRARY_OUTPUT_PATH_ROOT ${CMAKE_BINARY_DIR} CACHE PATH "root for library output, set this to change where android libs are compiled to" ) # any cross-compiling set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "Installation Directory") - endif(NOT CMAKE_TOOLCHAIN_FILE) + endif() endif() if(CMAKE_SYSTEM_NAME MATCHES WindowsPhone OR CMAKE_SYSTEM_NAME MATCHES WindowsStore) set(WINRT TRUE) -endif(CMAKE_SYSTEM_NAME MATCHES WindowsPhone OR CMAKE_SYSTEM_NAME MATCHES WindowsStore) +endif() if(WINRT) add_definitions(-DWINRT -DNO_GETENV) diff --git a/cmake/FindCUDA.cmake b/cmake/FindCUDA.cmake index bbdfb91a07..632b8c8285 100644 --- a/cmake/FindCUDA.cmake +++ b/cmake/FindCUDA.cmake @@ -1042,7 +1042,7 @@ function(CUDA_COMPUTE_BUILD_PATH path build_path) # Only deal with CMake style paths from here on out file(TO_CMAKE_PATH "${path}" bpath) if (IS_ABSOLUTE "${bpath}") - # Absolute paths are generally unnessary, especially if something like + # Absolute paths are generally unnecessary, especially if something like # file(GLOB_RECURSE) is used to pick up the files. string(FIND "${bpath}" "${CMAKE_CURRENT_BINARY_DIR}" _binary_dir_pos) @@ -1065,7 +1065,7 @@ function(CUDA_COMPUTE_BUILD_PATH path build_path) # Avoid spaces string(REPLACE " " "_" bpath "${bpath}") - # Strip off the filename. I wait until here to do it, since removin the + # Strip off the filename. I wait until here to do it, since removing the # basename can make a path that looked like path/../basename turn into # path/.. (notice the trailing slash). get_filename_component(bpath "${bpath}" PATH) @@ -1362,7 +1362,7 @@ macro(CUDA_WRAP_SRCS cuda_target format generated_files) # Bring in the dependencies. 
Creates a variable CUDA_NVCC_DEPEND ####### cuda_include_nvcc_dependencies(${cmake_dependency_file}) - # Convience string for output ########################################### + # Convenience string for output ########################################### if(CUDA_BUILD_EMULATION) set(cuda_build_type "Emulation") else() @@ -1563,7 +1563,7 @@ macro(CUDA_ADD_LIBRARY cuda_target) ${_cmake_options} ${_cuda_shared_flag} OPTIONS ${_options} ) - # Compute the file name of the intermedate link file used for separable + # Compute the file name of the intermediate link file used for separable # compilation. CUDA_COMPUTE_SEPARABLE_COMPILATION_OBJECT_FILE_NAME(link_file ${cuda_target} "${${cuda_target}_SEPARABLE_COMPILATION_OBJECTS}") @@ -1607,7 +1607,7 @@ macro(CUDA_ADD_EXECUTABLE cuda_target) # Create custom commands and targets for each file. CUDA_WRAP_SRCS( ${cuda_target} OBJ _generated_files ${_sources} OPTIONS ${_options} ) - # Compute the file name of the intermedate link file used for separable + # Compute the file name of the intermediate link file used for separable # compilation. CUDA_COMPUTE_SEPARABLE_COMPILATION_OBJECT_FILE_NAME(link_file ${cuda_target} "${${cuda_target}_SEPARABLE_COMPILATION_OBJECTS}") @@ -1723,7 +1723,7 @@ endmacro() ############################################################################### ############################################################################### macro(CUDA_BUILD_CLEAN_TARGET) - # Call this after you add all your CUDA targets, and you will get a convience + # Call this after you add all your CUDA targets, and you will get a convenience # target. You should also make clean after running this target to get the # build system to generate all the code again. 
diff --git a/cmake/OpenCVCompilerOptions.cmake b/cmake/OpenCVCompilerOptions.cmake index d83777fe4b..30e4a00a3f 100644 --- a/cmake/OpenCVCompilerOptions.cmake +++ b/cmake/OpenCVCompilerOptions.cmake @@ -1,5 +1,5 @@ if("${CMAKE_CXX_COMPILER};${CMAKE_C_COMPILER};${CMAKE_CXX_COMPILER_LAUNCHER}" MATCHES "ccache") - set(CMAKE_COMPILER_IS_CCACHE 1) # FIXIT Avoid setting of CMAKE_ variables + set(CMAKE_COMPILER_IS_CCACHE 1) # TODO: FIXIT Avoid setting of CMAKE_ variables set(OPENCV_COMPILER_IS_CCACHE 1) endif() function(access_CMAKE_COMPILER_IS_CCACHE) diff --git a/cmake/OpenCVFindLibsPerf.cmake b/cmake/OpenCVFindLibsPerf.cmake index 59c9c4ffca..4dfd7aab4b 100644 --- a/cmake/OpenCVFindLibsPerf.cmake +++ b/cmake/OpenCVFindLibsPerf.cmake @@ -43,7 +43,7 @@ endif(WITH_IPP_A) if(WITH_CUDA) include("${OpenCV_SOURCE_DIR}/cmake/OpenCVDetectCUDA.cmake") if(NOT HAVE_CUDA) - message(WARNING "OpenCV is not able to find/confidure CUDA SDK (required by WITH_CUDA). + message(WARNING "OpenCV is not able to find/configure CUDA SDK (required by WITH_CUDA). CUDA support will be disabled in OpenCV build. To eliminate this warning remove WITH_CUDA=ON CMake configuration option. 
") diff --git a/cmake/OpenCVModule.cmake b/cmake/OpenCVModule.cmake index 93b6123eba..db439b3981 100644 --- a/cmake/OpenCVModule.cmake +++ b/cmake/OpenCVModule.cmake @@ -455,7 +455,7 @@ function(__ocv_sort_modules_by_deps __lst) set(${__lst} "${result};${result_extra}" PARENT_SCOPE) endfunction() -# resolve dependensies +# resolve dependencies function(__ocv_resolve_dependencies) foreach(m ${OPENCV_MODULES_DISABLED_USER}) set(HAVE_${m} OFF CACHE INTERNAL "Module ${m} will not be built in current configuration") @@ -727,7 +727,7 @@ macro(ocv_set_module_sources) endif() endforeach() - # the hacky way to embeed any files into the OpenCV without modification of its build system + # the hacky way to embed any files into the OpenCV without modification of its build system if(COMMAND ocv_get_module_external_sources) ocv_get_module_external_sources() endif() @@ -958,7 +958,7 @@ macro(_ocv_create_module) target_compile_definitions(${the_module} PRIVATE CVAPI_EXPORTS) endif() - # For dynamic link numbering convenions + # For dynamic link numbering conventions if(NOT ANDROID) # Android SDK build scripts can include only .so files into final .apk # As result we should not set version properties for Android diff --git a/cmake/OpenCVPCHSupport.cmake b/cmake/OpenCVPCHSupport.cmake index b1dd60e849..b4658c604b 100644 --- a/cmake/OpenCVPCHSupport.cmake +++ b/cmake/OpenCVPCHSupport.cmake @@ -383,7 +383,7 @@ MACRO(ADD_NATIVE_PRECOMPILED_HEADER _targetName _input) # For Xcode, cmake needs my patch to process # GCC_PREFIX_HEADER and GCC_PRECOMPILE_PREFIX_HEADER as target properties - # When buiding out of the tree, precompiled may not be located + # When building out of the tree, precompiled may not be located # Use full path instead. 
GET_FILENAME_COMPONENT(fullPath ${_input} ABSOLUTE) diff --git a/doc/tutorials/viz/launching_viz/launching_viz.markdown b/doc/tutorials/viz/launching_viz/launching_viz.markdown index 5dc6a85883..6a02b9b7ad 100644 --- a/doc/tutorials/viz/launching_viz/launching_viz.markdown +++ b/doc/tutorials/viz/launching_viz/launching_viz.markdown @@ -37,7 +37,7 @@ Here is the general structure of the program: the same with **myWindow**. If the name does not exist, a new window is created. @code{.cpp} /// Access window via its name - viz::Viz3d sameWindow = viz::get("Viz Demo"); + viz::Viz3d sameWindow = viz::getWindowByName("Viz Demo"); @endcode - Start a controlled event loop. Once it starts, **wasStopped** is set to false. Inside the while loop, in each iteration, **spinOnce** is called to prevent event loop from completely stopping. diff --git a/modules/dnn/include/opencv2/dnn.hpp b/modules/dnn/include/opencv2/dnn.hpp index 57a564bf11..af919005f6 100644 --- a/modules/dnn/include/opencv2/dnn.hpp +++ b/modules/dnn/include/opencv2/dnn.hpp @@ -42,7 +42,7 @@ #ifndef OPENCV_DNN_HPP #define OPENCV_DNN_HPP -// This is an umbrealla header to include into you project. +// This is an umbrella header to include into you project. // We are free to change headers layout in dnn subfolder, so please include // this header for future compatibility @@ -52,10 +52,10 @@ This module contains: - API for new layers creation, layers are building bricks of neural networks; - set of built-in most-useful Layers; - - API to constuct and modify comprehensive neural networks from layers; + - API to construct and modify comprehensive neural networks from layers; - functionality for loading serialized networks models from different frameworks. - Functionality of this module is designed only for forward pass computations (i. e. network testing). + Functionality of this module is designed only for forward pass computations (i.e. network testing). A network training is in principle not supported. 
@} */ diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp index ffb09a2b95..cc8521586c 100644 --- a/modules/dnn/include/opencv2/dnn/all_layers.hpp +++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp @@ -58,7 +58,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN You can use both API, but factory API is less convenient for native C++ programming and basically designed for use inside importers (see @ref readNetFromCaffe(), @ref readNetFromTorch(), @ref readNetFromTensorflow()). Built-in layers partially reproduce functionality of corresponding Caffe and Torch7 layers. - In partuclar, the following layers and Caffe importer were tested to reproduce Caffe functionality: + In particular, the following layers and Caffe importer were tested to reproduce Caffe functionality: - Convolution - Deconvolution - Pooling @@ -108,13 +108,13 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN @f$W_{x?} \in R^{N_h \times N_x}@f$, @f$W_{h?} \in R^{N_h \times N_h}@f$, @f$b_? \in R^{N_h}@f$. For simplicity and performance purposes we use @f$ W_x = [W_{xi}; W_{xf}; W_{xo}, W_{xg}] @f$ - (i.e. @f$W_x@f$ is vertical contacentaion of @f$ W_{x?} @f$), @f$ W_x \in R^{4N_h \times N_x} @f$. + (i.e. @f$W_x@f$ is vertical concatenation of @f$ W_{x?} @f$), @f$ W_x \in R^{4N_h \times N_x} @f$. The same for @f$ W_h = [W_{hi}; W_{hf}; W_{ho}, W_{hg}], W_h \in R^{4N_h \times N_h} @f$ and for @f$ b = [b_i; b_f, b_o, b_g]@f$, @f$b \in R^{4N_h} @f$. - @param Wh is matrix defining how previous output is transformed to internal gates (i.e. according to abovemtioned notation is @f$ W_h @f$) - @param Wx is matrix defining how current input is transformed to internal gates (i.e. according to abovemtioned notation is @f$ W_x @f$) - @param b is bias vector (i.e. according to abovemtioned notation is @f$ b @f$) + @param Wh is matrix defining how previous output is transformed to internal gates (i.e. 
according to above mentioned notation is @f$ W_h @f$) + @param Wx is matrix defining how current input is transformed to internal gates (i.e. according to above mentioned notation is @f$ W_x @f$) + @param b is bias vector (i.e. according to above mentioned notation is @f$ b @f$) */ CV_DEPRECATED virtual void setWeights(const Mat &Wh, const Mat &Wx, const Mat &b) = 0; @@ -148,7 +148,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * If setUseTimstampsDim() is set to true then @p input[0] should has at least two dimensions with the following shape: [`T`, `N`, `[data dims]`], * where `T` specifies number of timestamps, `N` is number of independent streams (i.e. @f$ x_{t_0 + t}^{stream} @f$ is stored inside @p input[0][t, stream, ...]). * - * If setUseTimstampsDim() is set to fase then @p input[0] should contain single timestamp, its shape should has form [`N`, `[data dims]`] with at least one dimension. + * If setUseTimstampsDim() is set to false then @p input[0] should contain single timestamp, its shape should has form [`N`, `[data dims]`] with at least one dimension. * (i.e. @f$ x_{t}^{stream} @f$ is stored inside @p input[0][stream, ...]). */ @@ -550,7 +550,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * dst(x, y, c) = \frac{ src(x, y, c) }{norm(c)} * @f] * - * Where `x, y` - spatial cooridnates, `c` - channel. + * Where `x, y` - spatial coordinates, `c` - channel. * * An every sample in the batch is normalized separately. Optionally, * output is scaled by the trained parameters. @@ -565,7 +565,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN }; /** - * @brief Resize input 4-dimensional blob by nearest neghbor strategy. + * @brief Resize input 4-dimensional blob by nearest neighbor strategy. * * Layer is used to support TensorFlow's resize_nearest_neighbor op. 
*/ diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp index 3a1108663c..2a1d68af7e 100644 --- a/modules/dnn/include/opencv2/dnn/dnn.hpp +++ b/modules/dnn/include/opencv2/dnn/dnn.hpp @@ -87,7 +87,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN /** @brief This class provides all data needed to initialize layer. * - * It includes dictionary with scalar params (which can be readed by using Dict interface), + * It includes dictionary with scalar params (which can be read by using Dict interface), * blob params #blobs and optional meta information: #name and #type of layer instance. */ class CV_EXPORTS LayerParams : public Dict @@ -138,7 +138,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * Initialize wrapper from another one. It'll wrap the same host CPU * memory and mustn't allocate memory on device(i.e. GPU). It might * has different shape. Use in case of CPU memory reusing for reuse - * associented memory on device too. + * associated memory on device too. */ BackendWrapper(const Ptr& base, const MatShape& shape); @@ -346,7 +346,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN /** @brief Create a network from Intel's Model Optimizer intermediate representation. * @param[in] xml XML configuration file with network's topology. * @param[in] bin Binary file with trained weights. - * Networks imported from Intel's Model Optimizer are lauched in Intel's Inference Engine + * Networks imported from Intel's Model Optimizer are launched in Intel's Inference Engine * backend. */ CV_WRAP static Net readFromModelOptimizer(const String& xml, const String& bin); @@ -402,8 +402,8 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN /** @brief Connects #@p outNum output of the first layer to #@p inNum input of the second layer. 
* @param outLayerId identifier of the first layer - * @param inpLayerId identifier of the second layer * @param outNum number of the first layer output + * @param inpLayerId identifier of the second layer * @param inpNum number of the second layer input */ void connect(int outLayerId, int outNum, int inpLayerId, int inpNum); @@ -564,7 +564,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN */ CV_WRAP int getLayersCount(const String& layerType) const; - /** @brief Computes bytes number which are requered to store + /** @brief Computes bytes number which are required to store * all weights and intermediate blobs for model. * @param netInputShapes vector of shapes for all net inputs. * @param weights output parameter to store resulting bytes for weights. @@ -584,7 +584,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN const MatShape& netInputShape, CV_OUT size_t& weights, CV_OUT size_t& blobs) const; - /** @brief Computes bytes number which are requered to store + /** @brief Computes bytes number which are required to store * all weights and intermediate blobs for each layer. * @param netInputShapes vector of shapes for all net inputs. * @param layerIds output vector to save layer IDs. @@ -727,7 +727,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * @param[in] xml XML configuration file with network's topology. * @param[in] bin Binary file with trained weights. * @returns Net object. - * Networks imported from Intel's Model Optimizer are lauched in Intel's Inference Engine + * Networks imported from Intel's Model Optimizer are launched in Intel's Inference Engine * backend. */ CV_EXPORTS_W Net readNetFromModelOptimizer(const String &xml, const String &bin); @@ -745,7 +745,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN * @details if @p crop is true, input image is resized so one side after resize is equal to corresponding * dimension in @p size and another one is equal or larger. Then, crop from the center is performed. * If @p crop is false, direct resize without cropping and preserving aspect ratio is performed. 
- * @returns 4-dimansional Mat with NCHW dimensions order. + * @returns 4-dimensional Mat with NCHW dimensions order. */ CV_EXPORTS_W Mat blobFromImage(InputArray image, double scalefactor=1.0, const Size& size = Size(), const Scalar& mean = Scalar(), bool swapRB=true, bool crop=true); diff --git a/modules/dnn/misc/quantize_face_detector.py b/modules/dnn/misc/quantize_face_detector.py index c66b735847..8a8b88d181 100644 --- a/modules/dnn/misc/quantize_face_detector.py +++ b/modules/dnn/misc/quantize_face_detector.py @@ -223,9 +223,9 @@ with tf.Session() as sess: # By default, float16 weights are stored in repeated tensor's field called # `half_val`. It has type int32 with leading zeros for unused bytes. - # This type is encoded by Varint that means only 7 bits are used for value + # This type is encoded by Variant that means only 7 bits are used for value # representation but the last one is indicated the end of encoding. This way - # float16 might takes 1 or 2 or 3 bytes depends on value. To impove compression, + # float16 might takes 1 or 2 or 3 bytes depends on value. To improve compression, # we replace all `half_val` values to `tensor_content` using only 2 bytes for everyone. for node in graph_def.node: if 'value' in node.attr: diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp index a5656821c6..6318863b58 100644 --- a/modules/dnn/src/dnn.cpp +++ b/modules/dnn/src/dnn.cpp @@ -541,7 +541,7 @@ public: { // if dst already has been allocated with total(shape) elements, - // it won't be recrreated and pointer of dst.data remains the same. + // it won't be recreated and pointer of dst.data remains the same. dst.create(shape, use_half ? CV_16S : CV_32F); addHost(lp, dst); } @@ -1520,7 +1520,7 @@ struct Net::Impl } } - // fuse convlution layer followed by eltwise + relu + // fuse convolution layer followed by eltwise + relu if ( IS_DNN_OPENCL_TARGET(preferableTarget) ) { Ptr nextEltwiseLayer; @@ -1649,7 +1649,7 @@ struct Net::Impl // the optimization #3. 
if there is concat layer that concatenates channels // from the inputs together (i.e. axis == 1) then we make the inputs of - // the concat layer to write to the concatetion output buffer + // the concat layer to write to the concatenation output buffer // (and so we eliminate the concatenation layer, because the channels // are concatenated implicitly). Ptr concatLayer = ld.layerInstance.dynamicCast(); diff --git a/modules/dnn/src/halide_scheduler.cpp b/modules/dnn/src/halide_scheduler.cpp index a2cb410e7e..78335ddaf9 100644 --- a/modules/dnn/src/halide_scheduler.cpp +++ b/modules/dnn/src/halide_scheduler.cpp @@ -242,7 +242,7 @@ bool HalideScheduler::process(Ptr& node) std::map funcsMap; // Scheduled functions. // For every function, from top to bottom, we try to find a scheduling node. // Scheduling is successful (return true) if for the first function (top) - // node is respresented. + // node is represented. CV_Assert(!node.empty()); std::vector& funcs = node.dynamicCast()->funcs; for (int i = funcs.size() - 1; i >= 0; --i) diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp index 400e03dab5..2352b35c15 100644 --- a/modules/dnn/src/layers/convolution_layer.cpp +++ b/modules/dnn/src/layers/convolution_layer.cpp @@ -676,7 +676,7 @@ public: int j0 = std::max(0, (-in_j + dilation_w-1)/dilation_w); int j1 = std::min(kernel_w, (width - in_j + dilation_w-1)/dilation_w); - // here some non-continous sub-row of the row will not be + // here some non-continuous sub-row of the row will not be // filled from the tensor; we need to make sure that the uncovered // elements are explicitly set to 0's. the easiest way is to // set all the elements to 0's before the loop. 
diff --git a/modules/dnn/src/layers/detection_output_layer.cpp b/modules/dnn/src/layers/detection_output_layer.cpp index ee1ad95e61..e838bcd55f 100644 --- a/modules/dnn/src/layers/detection_output_layer.cpp +++ b/modules/dnn/src/layers/detection_output_layer.cpp @@ -110,7 +110,7 @@ public: float _nmsThreshold; int _topK; - // Whenever predicted bounding boxes are respresented in YXHW instead of XYWH layout. + // Whenever predicted bounding boxes are represented in YXHW instead of XYWH layout. bool _locPredTransposed; // It's true whenever predicted bounding boxes and proposals are normalized to [0, 1]. bool _bboxesNormalized; diff --git a/modules/dnn/src/layers/eltwise_layer.cpp b/modules/dnn/src/layers/eltwise_layer.cpp index 39961abb5f..61a7d0950c 100644 --- a/modules/dnn/src/layers/eltwise_layer.cpp +++ b/modules/dnn/src/layers/eltwise_layer.cpp @@ -79,7 +79,7 @@ public: else if (operation == "max") op = MAX; else - CV_Error(cv::Error::StsBadArg, "Unknown operaticon type \"" + operation + "\""); + CV_Error(cv::Error::StsBadArg, "Unknown operation type \"" + operation + "\""); } if (params.has("coeff")) diff --git a/modules/dnn/src/layers/prior_box_layer.cpp b/modules/dnn/src/layers/prior_box_layer.cpp index 74c0d31f1d..5e0e338429 100644 --- a/modules/dnn/src/layers/prior_box_layer.cpp +++ b/modules/dnn/src/layers/prior_box_layer.cpp @@ -366,7 +366,7 @@ public: kernel.set(13, (int)_imageWidth); kernel.run(1, &nthreads, NULL, false); - // clip the prior's coordidate such that it is within [0, 1] + // clip the prior's coordinate such that it is within [0, 1] if (_clip) { Mat mat = outputs[0].getMat(ACCESS_READ); @@ -442,7 +442,7 @@ public: } } } - // clip the prior's coordidate such that it is within [0, 1] + // clip the prior's coordinate such that it is within [0, 1] if (_clip) { int _outChannelSize = _layerHeight * _layerWidth * _numPriors * 4; @@ -565,7 +565,7 @@ private: std::vector _variance; std::vector _offsetsX; std::vector _offsetsY; - // Precomputed 
final widhts and heights based on aspect ratios or explicit sizes. + // Precomputed final widths and heights based on aspect ratios or explicit sizes. std::vector _boxWidths; std::vector _boxHeights; diff --git a/modules/dnn/src/ocl4dnn/src/ocl4dnn_conv_spatial.cpp b/modules/dnn/src/ocl4dnn/src/ocl4dnn_conv_spatial.cpp index 44a622f1d4..159319425e 100644 --- a/modules/dnn/src/ocl4dnn/src/ocl4dnn_conv_spatial.cpp +++ b/modules/dnn/src/ocl4dnn/src/ocl4dnn_conv_spatial.cpp @@ -709,7 +709,7 @@ bool OCL4DNNConvSpatial::swizzleWeight(const UMat &weight, return false; } } else { - // assumption: kernel dimesion is 2 + // assumption: kernel dimension is 2 Mat weightMat = weight.getMat(ACCESS_READ); Dtype* cpu_weight = (Dtype *)weightMat.ptr(); Mat swizzledWeightMat; diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp index 710d6e5a88..60da9d14b3 100644 --- a/modules/dnn/src/op_inf_engine.cpp +++ b/modules/dnn/src/op_inf_engine.cpp @@ -288,7 +288,7 @@ void InfEngineBackendNet::init(int targetId) } for (const InferenceEngine::DataPtr& out : l->outData) { - // TODO: Replace to uniquness assertion. + // TODO: Replace to uniqueness assertion. if (internalOutputs.find(out->name) == internalOutputs.end()) internalOutputs[out->name] = out; } @@ -305,7 +305,7 @@ void InfEngineBackendNet::init(int targetId) // Add all outputs. for (const InferenceEngine::DataPtr& out : l->outData) { - // TODO: Replace to uniquness assertion. + // TODO: Replace to uniqueness assertion. 
if (unconnectedOuts.find(out->name) == unconnectedOuts.end()) unconnectedOuts[out->name] = out; } diff --git a/modules/dnn/src/tensorflow/graph.proto b/modules/dnn/src/tensorflow/graph.proto index f945201399..478d35a9fe 100644 --- a/modules/dnn/src/tensorflow/graph.proto +++ b/modules/dnn/src/tensorflow/graph.proto @@ -86,7 +86,7 @@ message NodeDef { // | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") ) // // Valid values for this string include: - // * "@other/node" (colocate with "other/node") + // * "@other/node" (collocate with "other/node") // * "/job:worker/replica:0/task:1/gpu:3" (full specification) // * "/job:worker/gpu:3" (partial specification) // * "" (no specification) diff --git a/modules/dnn/src/torch/torch_importer.cpp b/modules/dnn/src/torch/torch_importer.cpp index 813ee085cb..3607e6c08e 100644 --- a/modules/dnn/src/torch/torch_importer.cpp +++ b/modules/dnn/src/torch/torch_importer.cpp @@ -311,11 +311,11 @@ struct TorchImporter int numModules = curModule->modules.size(); readTorchObject(index); - if (tensors.count(index)) //tensor was readed + if (tensors.count(index)) //tensor was read { tensorParams.insert(std::make_pair(key, std::make_pair(index, tensors[index]))); } - else if (storages.count(index)) //storage was readed + else if (storages.count(index)) //storage was read { Mat &matStorage = storages[index]; Mat matCasted; @@ -399,7 +399,7 @@ struct TorchImporter size_t requireElems = (size_t)offset + (size_t)steps[0] * (size_t)sizes[0]; size_t storageElems = storages[indexStorage].total(); if (requireElems > storageElems) - CV_Error(Error::StsBadSize, "Storage has insufficent number of elemements for requested Tensor"); + CV_Error(Error::StsBadSize, "Storage has insufficient number of elements for requested Tensor"); //convert sizes AutoBuffer isizes(ndims); diff --git a/modules/dnn/test/test_darknet_importer.cpp b/modules/dnn/test/test_darknet_importer.cpp index 11d2e50ef8..17d33d7662 100644 --- a/modules/dnn/test/test_darknet_importer.cpp +++ 
b/modules/dnn/test/test_darknet_importer.cpp @@ -143,7 +143,7 @@ TEST_P(Test_Darknet_nets, YoloVoc) std::vector confidences(3); std::vector boxes(3); classIds[0] = 6; confidences[0] = 0.750469f; boxes[0] = Rect2d(0.577374, 0.127391, 0.325575, 0.173418); // a car - classIds[1] = 1; confidences[1] = 0.780879f; boxes[1] = Rect2d(0.270762, 0.264102, 0.461713, 0.48131); // a bycicle + classIds[1] = 1; confidences[1] = 0.780879f; boxes[1] = Rect2d(0.270762, 0.264102, 0.461713, 0.48131); // a bicycle classIds[2] = 11; confidences[2] = 0.901615f; boxes[2] = Rect2d(0.1386, 0.338509, 0.282737, 0.60028); // a dog double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 7e-3 : 8e-5; double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.013 : 3e-5; @@ -182,7 +182,7 @@ TEST_P(Test_Darknet_nets, YOLOv3) std::vector confidences(3); std::vector boxes(3); classIds[0] = 7; confidences[0] = 0.952983f; boxes[0] = Rect2d(0.614622, 0.150257, 0.286747, 0.138994); // a truck - classIds[1] = 1; confidences[1] = 0.987908f; boxes[1] = Rect2d(0.150913, 0.221933, 0.591342, 0.524327); // a bycicle + classIds[1] = 1; confidences[1] = 0.987908f; boxes[1] = Rect2d(0.150913, 0.221933, 0.591342, 0.524327); // a bicycle classIds[2] = 16; confidences[2] = 0.998836f; boxes[2] = Rect2d(0.160024, 0.389964, 0.257861, 0.553752); // a dog (COCO) double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 4e-3 : 8e-5; double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 
0.011 : 3e-5; diff --git a/modules/dnn/test/test_torch_importer.cpp b/modules/dnn/test/test_torch_importer.cpp index 33e0e94801..ab74b190af 100644 --- a/modules/dnn/test/test_torch_importer.cpp +++ b/modules/dnn/test/test_torch_importer.cpp @@ -250,7 +250,7 @@ TEST_P(Test_Torch_nets, ENet_accuracy) Mat out = net.forward(); Mat ref = blobFromNPY(_tf("torch_enet_prob.npy", false)); // Due to numerical instability in Pooling-Unpooling layers (indexes jittering) - // thresholds for ENet must be changed. Accuracy of resuults was checked on + // thresholds for ENet must be changed. Accuracy of results was checked on // Cityscapes dataset and difference in mIOU with Torch is 10E-4% normAssert(ref, out, "", 0.00044, 0.44); diff --git a/modules/viz/CMakeLists.txt b/modules/viz/CMakeLists.txt index 903022bbaf..1f1e1af3b9 100644 --- a/modules/viz/CMakeLists.txt +++ b/modules/viz/CMakeLists.txt @@ -19,7 +19,7 @@ if(NOT BUILD_SHARED_LIBS) endif() endforeach() if(_conflicts) - message(STATUS "Disabling VIZ module due conflicts with VTK dependencies: ${_conflicts}") + message(STATUS "Disabling VIZ module due to conflicts with VTK dependencies: ${_conflicts}") ocv_module_disable(viz) endif() endif() diff --git a/modules/viz/include/opencv2/viz/widgets.hpp b/modules/viz/include/opencv2/viz/widgets.hpp index dcc1165660..1b73110b58 100644 --- a/modules/viz/include/opencv2/viz/widgets.hpp +++ b/modules/viz/include/opencv2/viz/widgets.hpp @@ -506,7 +506,7 @@ namespace cv }; ///////////////////////////////////////////////////////////////////////////// - /// Compond widgets + /// Compound widgets /** @brief This 3D Widget represents a coordinate system. 
: */ From 04802e41e9e129f9c37063c927b0d1f53e8b5553 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Thu, 31 May 2018 20:35:33 +0000 Subject: [PATCH 37/40] gtest: support parameters with types from anonymous namespace --- modules/dnn/test/test_precomp.hpp | 4 ++-- modules/ts/include/opencv2/ts/ts_gtest.h | 30 ++++++++++++++++-------- modules/ts/src/ts_gtest.cpp | 2 ++ 3 files changed, 24 insertions(+), 12 deletions(-) diff --git a/modules/dnn/test/test_precomp.hpp b/modules/dnn/test/test_precomp.hpp index 062308bf67..70b7b3d25e 100644 --- a/modules/dnn/test/test_precomp.hpp +++ b/modules/dnn/test/test_precomp.hpp @@ -49,7 +49,7 @@ #include "opencv2/dnn.hpp" #include "test_common.hpp" -namespace opencv_test { +namespace opencv_test { namespace { using namespace cv::dnn; CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE, DNN_BACKEND_INFERENCE_ENGINE) @@ -69,6 +69,6 @@ static testing::internal::ParamGenerator availableDnnTargets() return testing::ValuesIn(targets); } -} +}} #endif diff --git a/modules/ts/include/opencv2/ts/ts_gtest.h b/modules/ts/include/opencv2/ts/ts_gtest.h index 7c1854e9a5..2b1299c3bf 100644 --- a/modules/ts/include/opencv2/ts/ts_gtest.h +++ b/modules/ts/include/opencv2/ts/ts_gtest.h @@ -11539,12 +11539,15 @@ typename ParamNameGenFunc::Type *GetParamNameGen() { return DefaultParamName; } +} // namespace internal:: // fixes MacOS X issue with "friend class internal/*::anon*/::ParameterizedTestFactory;" +namespace { // wrap into anynomous namespace to avoid build warnings like GCC's -Wsubobject-linkage + // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. // // Stores a parameter value and later creates tests parameterized with that // value. 
template -class ParameterizedTestFactory : public TestFactoryBase { +class ParameterizedTestFactory : public internal::TestFactoryBase { public: typedef typename TestClass::ParamType ParamType; explicit ParameterizedTestFactory(ParamType parameter) : @@ -11559,6 +11562,8 @@ class ParameterizedTestFactory : public TestFactoryBase { GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestFactory); }; +} // namespace +namespace internal { // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. // @@ -20405,6 +20410,12 @@ class GTEST_API_ AssertHelper { } // namespace internal #if GTEST_HAS_PARAM_TEST + +namespace internal { +// Static value used for accessing test parameter during a test lifetime. +extern void* g_parameter_; +} // namespace internal + // The pure interface class that all value-parameterized tests inherit from. // A value-parameterized class must inherit from both ::testing::Test and // ::testing::WithParamInterface. In most cases that just means inheriting @@ -20451,29 +20462,28 @@ class WithParamInterface { // like writing 'WithParamInterface::GetParam()' for a test that // uses a fixture whose parameter type is int. const ParamType& GetParam() const { - GTEST_CHECK_(parameter_ != NULL) + GTEST_CHECK_(GetParameterPtrRef_() != NULL) << "GetParam() can only be called inside a value-parameterized test " << "-- did you intend to write TEST_P instead of TEST_F?"; - return *parameter_; + return *GetParameterPtrRef_(); } private: // Sets parameter value. The caller is responsible for making sure the value // remains alive and unchanged throughout the current test. static void SetParam(const ParamType* parameter) { - parameter_ = parameter; + GetParameterPtrRef_() = parameter; } - // Static value used for accessing parameter during a test lifetime. - static const ParamType* parameter_; + static const ParamType*& GetParameterPtrRef_() + { + return (const ParamType*&)internal::g_parameter_; + } // TestClass must be a subclass of WithParamInterface and Test. 
- template friend class internal::ParameterizedTestFactory; + template friend class /*internal::*/ParameterizedTestFactory; }; -template -const T* WithParamInterface::parameter_ = NULL; - // Most value-parameterized classes can ignore the existence of // WithParamInterface, and can just inherit from ::testing::TestWithParam. diff --git a/modules/ts/src/ts_gtest.cpp b/modules/ts/src/ts_gtest.cpp index ff9dd4a1bf..f71efbb0eb 100644 --- a/modules/ts/src/ts_gtest.cpp +++ b/modules/ts/src/ts_gtest.cpp @@ -10441,5 +10441,7 @@ const char* TypedTestCasePState::VerifyRegisteredTestNames( #endif // GTEST_HAS_TYPED_TEST_P +void* g_parameter_ = NULL; + } // namespace internal } // namespace testing From fc35c77f00219cf62eed63867240b10c6a7b1a26 Mon Sep 17 00:00:00 2001 From: Rostislav Vasilikhin Date: Mon, 4 Jun 2018 16:03:12 +0300 Subject: [PATCH 38/40] Merge pull request #11610 from savuor:fix/stereobm_simd_fixed_float * StereoBM: fixed SIMD processing for fixed-type output arrays * changed norm type and threshold, added assertion * fixed disp_shift --- modules/calib3d/src/stereobm.cpp | 68 ++++++++++++++++---- modules/calib3d/test/test_stereomatching.cpp | 6 ++ 2 files changed, 61 insertions(+), 13 deletions(-) diff --git a/modules/calib3d/src/stereobm.cpp b/modules/calib3d/src/stereobm.cpp index b3498c66d1..7aa721dd3d 100644 --- a/modules/calib3d/src/stereobm.cpp +++ b/modules/calib3d/src/stereobm.cpp @@ -284,7 +284,39 @@ prefilterXSobel( const Mat& src, Mat& dst, int ftzero ) static const int DISPARITY_SHIFT_16S = 4; static const int DISPARITY_SHIFT_32S = 8; +template +struct dispShiftTemplate +{ }; + +template<> +struct dispShiftTemplate +{ + enum { value = DISPARITY_SHIFT_16S }; +}; + +template<> +struct dispShiftTemplate +{ + enum { value = DISPARITY_SHIFT_32S }; +}; + +template +inline T dispDescale(int /*v1*/, int /*v2*/, int /*d*/); + +template<> +inline short dispDescale(int v1, int v2, int d) +{ + return (short)((v1*256 + (d != 0 ? 
v2*256/d : 0) + 15) >> 4); +} + +template <> +inline int dispDescale(int v1, int v2, int d) +{ + return (int)(v1*256 + (d != 0 ? v2*256/d : 0)); // no need to add 127, this will be converted to float +} + #if CV_SIMD128 +template static void findStereoCorrespondenceBM_SIMD( const Mat& left, const Mat& right, Mat& disp, Mat& cost, StereoBMParams& state, uchar* buf, int _dy0, int _dy1 ) @@ -302,7 +334,8 @@ static void findStereoCorrespondenceBM_SIMD( const Mat& left, const Mat& right, int ftzero = state.preFilterCap; int textureThreshold = state.textureThreshold; int uniquenessRatio = state.uniquenessRatio; - short FILTERED = (short)((mindisp - 1) << DISPARITY_SHIFT_16S); + const int disp_shift = dispShiftTemplate::value; + dType FILTERED = (dType)((mindisp - 1) << disp_shift); ushort *sad, *hsad0, *hsad, *hsad_sub; int *htext; @@ -310,7 +343,7 @@ static void findStereoCorrespondenceBM_SIMD( const Mat& left, const Mat& right, const uchar* lptr0 = left.ptr() + lofs; const uchar* rptr0 = right.ptr() + rofs; const uchar *lptr, *lptr_sub, *rptr; - short* dptr = disp.ptr(); + dType* dptr = disp.ptr(); int sstep = (int)left.step; int dstep = (int)(disp.step/sizeof(dptr[0])); int cstep = (height + dy0 + dy1)*ndisp; @@ -527,10 +560,10 @@ static void findStereoCorrespondenceBM_SIMD( const Mat& left, const Mat& right, { int p = sad[mind+1], n = sad[mind-1]; d = p + n - 2*sad[mind] + std::abs(p - n); - dptr[y*dstep] = (short)(((ndisp - mind - 1 + mindisp)*256 + (d != 0 ? 
(p-n)*256/d : 0) + 15) >> 4); + dptr[y*dstep] = dispDescale(ndisp - mind - 1 + mindisp, p-n, d); } else - dptr[y*dstep] = (short)((ndisp - mind - 1 + mindisp)*16); + dptr[y*dstep] = dispDescale(ndisp - mind - 1 + mindisp, 0, 0); costptr[y*coststep] = sad[mind]; } } @@ -540,8 +573,8 @@ static void findStereoCorrespondenceBM_SIMD( const Mat& left, const Mat& right, template static void findStereoCorrespondenceBM( const Mat& left, const Mat& right, - Mat& disp, Mat& cost, const StereoBMParams& state, - uchar* buf, int _dy0, int _dy1, const int disp_shift ) + Mat& disp, Mat& cost, const StereoBMParams& state, + uchar* buf, int _dy0, int _dy1 ) { const int ALIGN = 16; @@ -557,6 +590,7 @@ findStereoCorrespondenceBM( const Mat& left, const Mat& right, int ftzero = state.preFilterCap; int textureThreshold = state.textureThreshold; int uniquenessRatio = state.uniquenessRatio; + const int disp_shift = dispShiftTemplate::value; mType FILTERED = (mType)((mindisp - 1) << disp_shift); #if CV_SIMD128 @@ -849,8 +883,8 @@ findStereoCorrespondenceBM( const Mat& left, const Mat& right, sad[ndisp] = sad[ndisp-2]; int p = sad[mind+1], n = sad[mind-1]; d = p + n - 2*sad[mind] + std::abs(p - n); - dptr[y*dstep] = (mType)(((ndisp - mind - 1 + mindisp)*256 + (d != 0 ? (p-n)*256/d : 0) + 15) - >> (DISPARITY_SHIFT_32S - disp_shift)); + dptr[y*dstep] = dispDescale(ndisp - mind - 1 + mindisp, p-n, d); + costptr[y*coststep] = sad[mind]; } } @@ -980,7 +1014,10 @@ struct FindStereoCorrespInvoker : public ParallelLoopBody int _row0 = std::min(cvRound(range.start * rows / nstripes), rows); int _row1 = std::min(cvRound(range.end * rows / nstripes), rows); uchar *ptr = slidingSumBuf->ptr() + range.start * stripeBufSize; - int FILTERED = (state->minDisparity - 1)*16; + + int dispShift = disp->type() == CV_16S ? 
DISPARITY_SHIFT_16S : + DISPARITY_SHIFT_32S; + int FILTERED = (state->minDisparity - 1) << dispShift; Rect roi = validDisparityRect & Rect(0, _row0, cols, _row1 - _row0); if( roi.height == 0 ) @@ -1008,15 +1045,18 @@ struct FindStereoCorrespInvoker : public ParallelLoopBody #if CV_SIMD128 if( useSIMD && useShorts ) { - findStereoCorrespondenceBM_SIMD( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 ); + if( disp_i.type() == CV_16S) + findStereoCorrespondenceBM_SIMD( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 ); + else + findStereoCorrespondenceBM_SIMD( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1); } else #endif { if( disp_i.type() == CV_16S ) - findStereoCorrespondenceBM( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1, DISPARITY_SHIFT_16S ); + findStereoCorrespondenceBM( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 ); else - findStereoCorrespondenceBM( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1, DISPARITY_SHIFT_32S ); + findStereoCorrespondenceBM( left_i, right_i, disp_i, cost_i, *state, ptr, row0, rows - row1 ); } if( state->disp12MaxDiff >= 0 ) @@ -1104,7 +1144,6 @@ public: else disp_shift = DISPARITY_SHIFT_32S; - int FILTERED = (params.minDisparity - 1) << disp_shift; #ifdef HAVE_OPENCL @@ -1115,6 +1154,9 @@ public: { if(ocl_stereobm(left, right, disparr, ¶ms)) { + disp_shift = DISPARITY_SHIFT_16S; + FILTERED = (params.minDisparity - 1) << disp_shift; + if( params.speckleRange >= 0 && params.speckleWindowSize > 0 ) filterSpeckles(disparr.getMat(), FILTERED, params.speckleWindowSize, params.speckleRange, slidingSumBuf); if (dtype == CV_32F) diff --git a/modules/calib3d/test/test_stereomatching.cpp b/modules/calib3d/test/test_stereomatching.cpp index c79f013ea5..94fc9718cc 100644 --- a/modules/calib3d/test/test_stereomatching.cpp +++ b/modules/calib3d/test/test_stereomatching.cpp @@ -791,6 +791,12 @@ protected: bm->compute( leftImg, rightImg, tempDisp 
); tempDisp.convertTo(leftDisp, CV_32F, 1./StereoMatcher::DISP_SCALE); + //check for fixed-type disparity data type + Mat_ fixedFloatDisp; + bm->compute( leftImg, rightImg, fixedFloatDisp ); + EXPECT_LT(cvtest::norm(fixedFloatDisp, leftDisp, cv::NORM_L2 | cv::NORM_RELATIVE), + 0.005 + DBL_EPSILON); + if (params.mindisp != 0) for (int y = 0; y < leftDisp.rows; y++) for (int x = 0; x < leftDisp.cols; x++) From 0560747d3d977c2c03a7e8e8ca181dde26b0189c Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 4 Jun 2018 17:15:56 +0300 Subject: [PATCH 39/40] docs: preserve space symbols during bs4 processing --- doc/tools/html_functions.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/doc/tools/html_functions.py b/doc/tools/html_functions.py index c3fd3bd8e9..b76639cea5 100644 --- a/doc/tools/html_functions.py +++ b/doc/tools/html_functions.py @@ -3,6 +3,7 @@ import sys import logging import os +import re from pprint import pprint import traceback @@ -17,12 +18,20 @@ except ImportError: def load_html_file(file_dir): """ Uses BeautifulSoup to load an html """ with open(file_dir, 'rb') as fp: - soup = BeautifulSoup(fp, 'html.parser') + data = fp.read() + if os.name == 'nt' or sys.version_info[0] == 3: + data = data.decode(encoding='utf-8', errors='strict') + data = re.sub(r'(\>)([ ]+)', lambda match: match.group(1) + ('!space!' * len(match.group(2))), data) + data = re.sub(r'([ ]+)(\<)', lambda match: ('!space!' 
* len(match.group(1))) + match.group(2), data) + if os.name == 'nt' or sys.version_info[0] == 3: + data = data.encode('utf-8', 'ignore') + soup = BeautifulSoup(data, 'html.parser') return soup def update_html(file, soup): s = str(soup) - if os.name == 'nt' or sys.version_info[0] == 3: # if Windows + s = s.replace('!space!', ' ') + if os.name == 'nt' or sys.version_info[0] == 3: s = s.encode('utf-8', 'ignore') with open(file, 'wb') as f: f.write(s) From dcb9bc254431a5312f761f006c1c2d6cc76aa0f0 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 4 Jun 2018 17:58:06 +0300 Subject: [PATCH 40/40] python: eliminate pylint warnings Tested with: - pylint 1.9.1 --- modules/python/test/test_misc.py | 6 +++--- samples/dnn/object_detection.py | 2 +- samples/python/calibrate.py | 2 +- samples/python/camera_calibration_show_extrinsics.py | 2 +- .../histogram_equalization/EqualizeHist_Demo.py | 2 +- samples/python/tutorial_code/imgProc/Smoothing/smoothing.py | 2 +- samples/python/tutorial_code/imgProc/threshold/threshold.py | 2 +- .../imgProc/threshold_inRange/threshold_inRange.py | 2 +- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/python/test/test_misc.py b/modules/python/test/test_misc.py index abb66c13de..5f07d733f2 100644 --- a/modules/python/test/test_misc.py +++ b/modules/python/test/test_misc.py @@ -23,7 +23,7 @@ class Bindings(NewOpenCVTests): try: cv.imshow("", None) # This causes an assert self.assertEqual("Dead code", 0) - except cv.error as e: + except cv.error as _e: pass handler_called = [False] @@ -34,7 +34,7 @@ class Bindings(NewOpenCVTests): try: cv.imshow("", None) # This causes an assert self.assertEqual("Dead code", 0) - except cv.error as e: + except cv.error as _e: self.assertEqual(handler_called[0], True) pass @@ -42,7 +42,7 @@ class Bindings(NewOpenCVTests): try: cv.imshow("", None) # This causes an assert self.assertEqual("Dead code", 0) - except cv.error as e: + except cv.error as _e: pass diff --git 
a/samples/dnn/object_detection.py b/samples/dnn/object_detection.py index a299b558e7..2cfb6d2106 100644 --- a/samples/dnn/object_detection.py +++ b/samples/dnn/object_detection.py @@ -174,7 +174,7 @@ while cv.waitKey(1) < 0: net.setInput(blob) if net.getLayer(0).outputNameToIndex('im_info') != -1: # Faster-RCNN or R-FCN frame = cv.resize(frame, (inpWidth, inpHeight)) - net.setInput(np.array([inpHeight, inpWidth, 1.6], dtype=np.float32), 'im_info'); + net.setInput(np.array([inpHeight, inpWidth, 1.6], dtype=np.float32), 'im_info') outs = net.forward(getOutputsNames(net)) postprocess(frame, outs) diff --git a/samples/python/calibrate.py b/samples/python/calibrate.py index 14019127bc..a2970a95e7 100755 --- a/samples/python/calibrate.py +++ b/samples/python/calibrate.py @@ -71,7 +71,7 @@ if __name__ == '__main__': if debug_dir: vis = cv.cvtColor(img, cv.COLOR_GRAY2BGR) cv.drawChessboardCorners(vis, pattern_size, corners, found) - path, name, ext = splitfn(fn) + _path, name, _ext = splitfn(fn) outfile = os.path.join(debug_dir, name + '_chess.png') cv.imwrite(outfile, vis) diff --git a/samples/python/camera_calibration_show_extrinsics.py b/samples/python/camera_calibration_show_extrinsics.py index 7b1b0cf980..75274aea9e 100755 --- a/samples/python/camera_calibration_show_extrinsics.py +++ b/samples/python/camera_calibration_show_extrinsics.py @@ -91,7 +91,7 @@ def create_board_model(extrinsics, board_width, board_height, square_size, draw_ # draw calibration board X_board = np.ones((4,5)) - X_board_cam = np.ones((extrinsics.shape[0],4,5)) + #X_board_cam = np.ones((extrinsics.shape[0],4,5)) X_board[0:3,0] = [0,0,0] X_board[0:3,1] = [width,0,0] X_board[0:3,2] = [width,height,0] diff --git a/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py b/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py index 47caec4d0a..fb87cce75a 100644 --- 
a/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py +++ b/samples/python/tutorial_code/Histograms_Matching/histogram_equalization/EqualizeHist_Demo.py @@ -18,7 +18,7 @@ src = cv.cvtColor(src, cv.COLOR_BGR2GRAY) ## [Convert to grayscale] ## [Apply Histogram Equalization] -dst = cv.equalizeHist(src); +dst = cv.equalizeHist(src) ## [Apply Histogram Equalization] ## [Display results] diff --git a/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py b/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py index 205ee6d488..e7096580ad 100644 --- a/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py +++ b/samples/python/tutorial_code/imgProc/Smoothing/smoothing.py @@ -88,7 +88,7 @@ def main(argv): def display_caption(caption): global dst dst = np.zeros(src.shape, src.dtype) - rows, cols, ch = src.shape + rows, cols, _ch = src.shape cv.putText(dst, caption, (int(cols / 4), int(rows / 2)), cv.FONT_HERSHEY_COMPLEX, 1, (255, 255, 255)) diff --git a/samples/python/tutorial_code/imgProc/threshold/threshold.py b/samples/python/tutorial_code/imgProc/threshold/threshold.py index 0d640750aa..1ba38126c9 100644 --- a/samples/python/tutorial_code/imgProc/threshold/threshold.py +++ b/samples/python/tutorial_code/imgProc/threshold/threshold.py @@ -33,7 +33,7 @@ if src is None: print('Could not open or find the image: ', args.input) exit(0) # Convert the image to Gray -src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY); +src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY) ## [load] ## [window] diff --git a/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py b/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py index 77d95fe395..d54d93c7fc 100644 --- a/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py +++ b/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py @@ -94,7 +94,7 @@ while True: break frame_HSV = cv.cvtColor(frame, 
cv.COLOR_BGR2HSV) - frame_threshold = cv.inRange(frame_HSV, (low_H, low_S, low_V), (high_H, high_S, high_V)); + frame_threshold = cv.inRange(frame_HSV, (low_H, low_S, low_V), (high_H, high_S, high_V)) ## [while] ## [show]