Diffstat (limited to 'ml/dlib/dlib/image_processing')
-rw-r--r-- ml/dlib/dlib/image_processing/box_overlap_testing.h                     |  215
-rw-r--r-- ml/dlib/dlib/image_processing/box_overlap_testing_abstract.h            |  201
-rw-r--r-- ml/dlib/dlib/image_processing/correlation_tracker.h                     |  404
-rw-r--r-- ml/dlib/dlib/image_processing/correlation_tracker_abstract.h            |  162
-rw-r--r-- ml/dlib/dlib/image_processing/detection_template_tools.h                |  113
-rw-r--r-- ml/dlib/dlib/image_processing/detection_template_tools_abstract.h       |   95
-rw-r--r-- ml/dlib/dlib/image_processing/frontal_face_detector.h                   | 2373
-rw-r--r-- ml/dlib/dlib/image_processing/frontal_face_detector_abstract.h          |   25
-rw-r--r-- ml/dlib/dlib/image_processing/full_object_detection.h                   |  191
-rw-r--r-- ml/dlib/dlib/image_processing/full_object_detection_abstract.h          |  203
-rw-r--r-- ml/dlib/dlib/image_processing/generic_image.h                           |  431
-rw-r--r-- ml/dlib/dlib/image_processing/object_detector.h                         |  626
-rw-r--r-- ml/dlib/dlib/image_processing/object_detector_abstract.h                |  404
-rw-r--r-- ml/dlib/dlib/image_processing/remove_unobtainable_rectangles.h          |  317
-rw-r--r-- ml/dlib/dlib/image_processing/remove_unobtainable_rectangles_abstract.h |   56
-rw-r--r-- ml/dlib/dlib/image_processing/render_face_detections.h                  |   99
-rw-r--r-- ml/dlib/dlib/image_processing/render_face_detections_abstract.h         |   59
-rw-r--r-- ml/dlib/dlib/image_processing/scan_fhog_pyramid.h                       | 1348
-rw-r--r-- ml/dlib/dlib/image_processing/scan_fhog_pyramid_abstract.h              |  784
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image.h                              |  368
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_abstract.h                     |  227
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_boxes.h                        |  630
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_boxes_abstract.h               |  394
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_custom.h                       |  401
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_custom_abstract.h              |  390
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_pyramid.h                      | 1101
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_pyramid_abstract.h             |  495
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_pyramid_tools.h                |  180
-rw-r--r-- ml/dlib/dlib/image_processing/scan_image_pyramid_tools_abstract.h       |  118
-rw-r--r-- ml/dlib/dlib/image_processing/setup_hashed_features.h                   |  219
-rw-r--r-- ml/dlib/dlib/image_processing/setup_hashed_features_abstract.h          |  210
-rw-r--r-- ml/dlib/dlib/image_processing/shape_predictor.h                         |  524
-rw-r--r-- ml/dlib/dlib/image_processing/shape_predictor_abstract.h                |  195
-rw-r--r-- ml/dlib/dlib/image_processing/shape_predictor_trainer.h                 |  852
-rw-r--r-- ml/dlib/dlib/image_processing/shape_predictor_trainer_abstract.h        |  418
35 files changed, 14828 insertions(+), 0 deletions(-)
diff --git a/ml/dlib/dlib/image_processing/box_overlap_testing.h b/ml/dlib/dlib/image_processing/box_overlap_testing.h
new file mode 100644
index 000000000..32409d13e
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/box_overlap_testing.h
@@ -0,0 +1,215 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#ifndef DLIB_BOX_OVERlAP_TESTING_Hh_
+#define DLIB_BOX_OVERlAP_TESTING_Hh_
+
+#include "box_overlap_testing_abstract.h"
+#include "../geometry.h"
+#include <vector>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_intersection_over_union (
+ const drectangle& a,
+ const drectangle& b
+ )
+ {
+ const double inner = a.intersect(b).area();
+ if (inner == 0)
+ return 0;
+ const double outer = (a+b).area();
+ return inner/outer;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_intersection_over_union (
+ const rectangle& a,
+ const rectangle& b
+ )
+ {
+ return box_intersection_over_union(drectangle(a),drectangle(b));
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_percent_covered (
+ const drectangle& a,
+ const drectangle& b
+ )
+ {
+ const double inner = a.intersect(b).area();
+ if (inner == 0)
+ return 0;
+ return std::max(inner/a.area(), inner/b.area());
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_percent_covered (
+ const rectangle& a,
+ const rectangle& b
+ )
+ {
+ return box_percent_covered(drectangle(a), drectangle(b));
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ class test_box_overlap
+ {
+ public:
+ test_box_overlap (
+ ) : iou_thresh(0.5), percent_covered_thresh(1.0)
+ {}
+
+ explicit test_box_overlap (
+ double iou_thresh_,
+ double percent_covered_thresh_ = 1.0
+ ) : iou_thresh(iou_thresh_), percent_covered_thresh(percent_covered_thresh_)
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 <= iou_thresh && iou_thresh <= 1 &&
+ 0 <= percent_covered_thresh && percent_covered_thresh <= 1,
+ "\t test_box_overlap::test_box_overlap(iou_thresh, percent_covered_thresh)"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t iou_thresh: " << iou_thresh
+ << "\n\t percent_covered_thresh: " << percent_covered_thresh
+ << "\n\t this: " << this
+ );
+
+ }
+
+ bool operator() (
+ const dlib::rectangle& a,
+ const dlib::rectangle& b
+ ) const
+ {
+ const double inner = a.intersect(b).area();
+ if (inner == 0)
+ return false;
+
+ const double outer = (a+b).area();
+ if (inner/outer > iou_thresh ||
+ inner/a.area() > percent_covered_thresh ||
+ inner/b.area() > percent_covered_thresh)
+ return true;
+ else
+ return false;
+ }
+
+ double get_percent_covered_thresh (
+ ) const
+ {
+ return percent_covered_thresh;
+ }
+
+ double get_iou_thresh (
+ ) const
+ {
+ return iou_thresh;
+ }
+
+ private:
+ double iou_thresh;
+ double percent_covered_thresh;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ inline void serialize (
+ const test_box_overlap& item,
+ std::ostream& out
+ )
+ {
+ serialize(item.get_iou_thresh(), out);
+ serialize(item.get_percent_covered_thresh(), out);
+ }
+
+ inline void deserialize (
+ test_box_overlap& item,
+ std::istream& in
+ )
+ {
+ double percent_covered_thresh, iou_thresh;
+ deserialize(iou_thresh, in);
+ deserialize(percent_covered_thresh, in);
+ item = test_box_overlap(iou_thresh, percent_covered_thresh);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline test_box_overlap find_tight_overlap_tester (
+ const std::vector<std::vector<rectangle> >& rects
+ )
+ {
+ double max_pcov = 0;
+ double max_iou_score = 0;
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < rects[i].size(); ++j)
+ {
+ for (unsigned long k = j+1; k < rects[i].size(); ++k)
+ {
+ const rectangle a = rects[i][j];
+ const rectangle b = rects[i][k];
+ const double iou_score = (a.intersect(b)).area()/(double)(a+b).area();
+ const double pcov_a = (a.intersect(b)).area()/(double)(a).area();
+ const double pcov_b = (a.intersect(b)).area()/(double)(b).area();
+
+ if (iou_score > max_iou_score)
+ max_iou_score = iou_score;
+
+ if (pcov_a > max_pcov)
+ max_pcov = pcov_a;
+ if (pcov_b > max_pcov)
+ max_pcov = pcov_b;
+ }
+ }
+ }
+
+ // Relax these thresholds very slightly. We do this because on some systems the
+        // boxes that generated the max values erroneously trigger a box overlap match even
+ // though their percent covered and iou values are *equal* to the thresholds but
+ // not greater. That is, sometimes when double values get moved around they change
+ // their values slightly, so this avoids the problems that can create.
+ max_iou_score = std::min(1.0000001*max_iou_score, 1.0);
+ max_pcov = std::min(1.0000001*max_pcov, 1.0);
+ return test_box_overlap(max_iou_score, max_pcov);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline bool overlaps_any_box (
+ const test_box_overlap& tester,
+ const std::vector<rectangle>& rects,
+ const rectangle& rect
+ )
+ {
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ if (tester(rects[i],rect))
+ return true;
+ }
+ return false;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline bool overlaps_any_box (
+ const std::vector<rectangle>& rects,
+ const rectangle& rect
+ )
+ {
+ return overlaps_any_box(test_box_overlap(),rects,rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_BOX_OVERlAP_TESTING_Hh_
+
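The predicate above treats two boxes as overlapping when either their IoU exceeds get_iou_thresh() or one box covers more than get_percent_covered_thresh() of the other. A minimal usage sketch, not part of the patch; the two rectangles are invented for illustration:

    #include <dlib/image_processing.h>
    #include <iostream>

    int main()
    {
        using namespace dlib;
        // Two hypothetical 100x100 detections offset by (50,50).
        const rectangle a(0,0, 99,99);
        const rectangle b(50,50, 149,149);

        // Intersection is 50x50 = 2500; (a+b) is the 150x150 box holding both,
        // so this implementation reports 2500/22500 ~= 0.111.
        std::cout << box_intersection_over_union(a, b) << "\n";

        // max(2500/10000, 2500/10000) = 0.25
        std::cout << box_percent_covered(a, b) << "\n";

        // Default thresholds (0.5, 1.0): this pair does not count as overlapping.
        const test_box_overlap tester;
        std::cout << std::boolalpha << tester(a, b) << "\n";   // prints false
        return 0;
    }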
diff --git a/ml/dlib/dlib/image_processing/box_overlap_testing_abstract.h b/ml/dlib/dlib/image_processing/box_overlap_testing_abstract.h
new file mode 100644
index 000000000..1bb4a28ae
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/box_overlap_testing_abstract.h
@@ -0,0 +1,201 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#undef DLIB_BOX_OVERlAP_TESTING_ABSTRACT_Hh_
+#ifdef DLIB_BOX_OVERlAP_TESTING_ABSTRACT_Hh_
+
+#include "../geometry.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_intersection_over_union (
+ const drectangle& a,
+ const drectangle& b
+ );
+ /*!
+ ensures
+ - returns area of the intersection of a and b divided by (a+b).area(). If both
+ boxes are empty then returns 0.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_intersection_over_union (
+ const rectangle& a,
+ const rectangle& b
+ );
+ /*!
+ ensures
+ - returns area of the intersection of a and b divided by (a+b).area(). If both
+ boxes are empty then returns 0.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_percent_covered (
+ const drectangle& a,
+ const drectangle& b
+ );
+ /*!
+ ensures
+ - let OVERLAP = a.intersect(b).area()
+ - This function returns max(OVERLAP/a.area(), OVERLAP/b.area())
+ e.g. If one box entirely contains another then this function returns 1, if
+ they don't overlap at all it returns 0.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ inline double box_percent_covered (
+ const rectangle& a,
+ const rectangle& b
+ );
+ /*!
+ ensures
+ - let OVERLAP = a.intersect(b).area()
+ - This function returns max(OVERLAP/a.area(), OVERLAP/b.area())
+ e.g. If one box entirely contains another then this function returns 1, if
+ they don't overlap at all it returns 0.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ class test_box_overlap
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object is a simple function object for determining if two rectangles
+ overlap.
+
+ THREAD SAFETY
+ Concurrent access to an instance of this object is safe provided that
+ only const member functions are invoked. Otherwise, access must be
+ protected by a mutex lock.
+ !*/
+
+ public:
+ test_box_overlap (
+ );
+ /*!
+ ensures
+ - #get_iou_thresh() == 0.5
+ - #get_percent_covered_thresh() == 1.0
+ !*/
+
+ explicit test_box_overlap (
+ double iou_thresh,
+ double percent_covered_thresh = 1.0
+ );
+ /*!
+ requires
+ - 0 <= iou_thresh <= 1
+ - 0 <= percent_covered_thresh <= 1
+ ensures
+ - #get_iou_thresh() == iou_thresh
+ - #get_percent_covered_thresh() == percent_covered_thresh
+ !*/
+
+ bool operator() (
+ const dlib::rectangle& a,
+ const dlib::rectangle& b
+ ) const;
+ /*!
+ ensures
+ - returns true if a and b overlap "enough". This is defined precisely below.
+ - if (a.intersect(b).area()/(a+b).area() > get_iou_thresh() ||
+ a.intersect(b).area()/a.area() > get_percent_covered_thresh() ||
+ a.intersect(b).area()/b.area() > get_percent_covered_thresh() ) then
+ - returns true
+ - else
+ - returns false
+ !*/
+
+ double get_iou_thresh (
+ ) const;
+ /*!
+ ensures
+                - returns the threshold used to determine if two rectangles' intersection
+ over union value is big enough to be considered a match. Note that the
+ iou score varies from 0 to 1 and only becomes 1 when two rectangles are
+ identical.
+ !*/
+
+ double get_percent_covered_thresh (
+ ) const;
+ /*!
+ ensures
+ - returns the threshold used to determine if two rectangles overlap. This
+ value is the percent of a rectangle's area covered by another rectangle.
+ !*/
+
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ void serialize (
+ const test_box_overlap& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ void deserialize (
+ test_box_overlap& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ test_box_overlap find_tight_overlap_tester (
+ const std::vector<std::vector<rectangle> >& rects
+ );
+ /*!
+ ensures
+ - This function finds the most restrictive test_box_overlap object possible
+ that is consistent with the given set of sets of rectangles.
+ - To be precise, this function finds and returns a test_box_overlap object
+ TBO such that:
+ - TBO.get_iou_thresh() and TBO.get_percent_covered_thresh() are as small
+ as possible such that the following conditions are satisfied.
+ - for all valid i:
+ - for all distinct rectangles A and B in rects[i]:
+ - TBO(A,B) == false
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ bool overlaps_any_box (
+ const test_box_overlap& tester,
+ const std::vector<rectangle>& rects,
+ const rectangle& rect
+ );
+ /*!
+ ensures
+ - returns true if rect overlaps any box in rects and false otherwise. Overlap
+ is determined based on the given tester object.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ bool overlaps_any_box (
+ const std::vector<rectangle>& rects,
+ const rectangle& rect
+ );
+ /*!
+ ensures
+ - returns overlaps_any_box(test_box_overlap(), rects, rect)
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_BOX_OVERlAP_TESTING_ABSTRACT_Hh_
+
+
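find_tight_overlap_tester() is meant to be fed the truth boxes of a training set, one inner vector per image, so that the returned tester never flags two distinct truth boxes as overlapping. A short sketch under that assumption; the boxes here are invented:

    #include <dlib/image_processing.h>

    int main()
    {
        using namespace dlib;
        // One inner vector per training image.
        std::vector<std::vector<rectangle> > truth(1);
        truth[0].push_back(rectangle(0,0, 49,49));
        truth[0].push_back(rectangle(10,0, 59,49));   // heavily overlaps its neighbor

        // The loosest thresholds under which every truth pair is "non-overlapping".
        const test_box_overlap tester = find_tight_overlap_tester(truth);

        // By construction tester(truth[0][0], truth[0][1]) == false, whereas the
        // default test_box_overlap() would have flagged this pair as overlapping.
        return tester(truth[0][0], truth[0][1]) ? 1 : 0;
    }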
diff --git a/ml/dlib/dlib/image_processing/correlation_tracker.h b/ml/dlib/dlib/image_processing/correlation_tracker.h
new file mode 100644
index 000000000..f005ddc7b
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/correlation_tracker.h
@@ -0,0 +1,404 @@
+// Copyright (C) 2015 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#ifndef DLIB_CORRELATION_TrACKER_H_
+#define DLIB_CORRELATION_TrACKER_H_
+
+#include "correlation_tracker_abstract.h"
+#include "../geometry.h"
+#include "../matrix.h"
+#include "../array2d.h"
+#include "../image_transforms/assign_image.h"
+#include "../image_transforms/interpolation.h"
+
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class correlation_tracker
+ {
+ public:
+
+ explicit correlation_tracker (unsigned long filter_size = 6,
+ unsigned long num_scale_levels = 5,
+ unsigned long scale_window_size = 23,
+ double regularizer_space = 0.001,
+ double nu_space = 0.025,
+ double regularizer_scale = 0.001,
+ double nu_scale = 0.025,
+ double scale_pyramid_alpha = 1.020
+ )
+ : filter_size(1 << filter_size), num_scale_levels(1 << num_scale_levels),
+ scale_window_size(scale_window_size),
+ regularizer_space(regularizer_space), nu_space(nu_space),
+ regularizer_scale(regularizer_scale), nu_scale(nu_scale),
+ scale_pyramid_alpha(scale_pyramid_alpha)
+ {
+ // Create the cosine mask used for space filtering.
+ mask = make_cosine_mask();
+
+ // Create the cosine mask used for the scale filtering.
+ scale_cos_mask.resize(get_num_scale_levels());
+ const long max_level = get_num_scale_levels()/2;
+ for (unsigned long k = 0; k < get_num_scale_levels(); ++k)
+ {
+ double dist = std::abs((double)k-max_level)/max_level*pi/2;
+ dist = std::min(dist, pi/2);
+ scale_cos_mask[k] = std::cos(dist);
+ }
+ }
+
+ template <typename image_type>
+ void start_track (
+ const image_type& img,
+ const drectangle& p
+ )
+ {
+ DLIB_CASSERT(p.is_empty() == false,
+ "\t void correlation_tracker::start_track()"
+ << "\n\t You can't give an empty rectangle."
+ );
+
+ B.set_size(0,0);
+
+ point_transform_affine tform = inv(make_chip(img, p, F));
+ for (unsigned long i = 0; i < F.size(); ++i)
+ fft_inplace(F[i]);
+ make_target_location_image(tform(center(p)), G);
+ A.resize(F.size());
+ for (unsigned long i = 0; i < F.size(); ++i)
+ {
+ A[i] = pointwise_multiply(G, F[i]);
+ B += squared(real(F[i]))+squared(imag(F[i]));
+ }
+
+ position = p;
+
+ // now do the scale space stuff
+ make_scale_space(img, Fs);
+ for (unsigned long i = 0; i < Fs.size(); ++i)
+ fft_inplace(Fs[i]);
+ make_scale_target_location_image(get_num_scale_levels()/2, Gs);
+ Bs.set_size(0);
+ As.resize(Fs.size());
+ for (unsigned long i = 0; i < Fs.size(); ++i)
+ {
+ As[i] = pointwise_multiply(Gs, Fs[i]);
+ Bs += squared(real(Fs[i]))+squared(imag(Fs[i]));
+ }
+ }
+
+
+ unsigned long get_filter_size (
+ ) const { return filter_size; }
+
+ unsigned long get_num_scale_levels(
+ ) const { return num_scale_levels; }
+
+ unsigned long get_scale_window_size (
+ ) const { return scale_window_size; }
+
+ double get_regularizer_space (
+ ) const { return regularizer_space; }
+ inline double get_nu_space (
+ ) const { return nu_space;}
+
+ double get_regularizer_scale (
+ ) const { return regularizer_scale; }
+ double get_nu_scale (
+ ) const { return nu_scale;}
+
+ drectangle get_position (
+ ) const
+ {
+ return position;
+ }
+
+ double get_scale_pyramid_alpha (
+ ) const { return scale_pyramid_alpha; }
+
+
+ template <typename image_type>
+ double update_noscale(
+ const image_type& img,
+ const drectangle& guess
+ )
+ {
+ DLIB_CASSERT(get_position().is_empty() == false,
+ "\t double correlation_tracker::update()"
+ << "\n\t You must call start_track() first before calling update()."
+ );
+
+
+ const point_transform_affine tform = make_chip(img, guess, F);
+ for (unsigned long i = 0; i < F.size(); ++i)
+ fft_inplace(F[i]);
+
+ // use the current filter to predict the object's location
+ G = 0;
+ for (unsigned long i = 0; i < F.size(); ++i)
+ G += pointwise_multiply(F[i],conj(A[i]));
+ G = pointwise_multiply(G, reciprocal(B+get_regularizer_space()));
+ ifft_inplace(G);
+ const dlib::vector<double,2> pp = max_point_interpolated(real(G));
+
+
+ // Compute the peak to side lobe ratio.
+ const point p = pp;
+ running_stats<double> rs;
+ const rectangle peak = centered_rect(p, 8,8);
+ for (long r = 0; r < G.nr(); ++r)
+ {
+ for (long c = 0; c < G.nc(); ++c)
+ {
+ if (!peak.contains(point(c,r)))
+ rs.add(G(r,c).real());
+ }
+ }
+ const double psr = (G(p.y(),p.x()).real()-rs.mean())/rs.stddev();
+
+ // update the position of the object
+ position = translate_rect(guess, tform(pp)-center(guess));
+
+ // now update the position filters
+ make_target_location_image(pp, G);
+ B *= (1-get_nu_space());
+ for (unsigned long i = 0; i < F.size(); ++i)
+ {
+ A[i] = get_nu_space()*pointwise_multiply(G, F[i]) + (1-get_nu_space())*A[i];
+ B += get_nu_space()*(squared(real(F[i]))+squared(imag(F[i])));
+ }
+
+ return psr;
+ }
+
+ template <typename image_type>
+ double update (
+ const image_type& img,
+ const drectangle& guess
+ )
+ {
+ double psr = update_noscale(img, guess);
+
+ // Now predict the scale change
+ make_scale_space(img, Fs);
+ for (unsigned long i = 0; i < Fs.size(); ++i)
+ fft_inplace(Fs[i]);
+ Gs = 0;
+ for (unsigned long i = 0; i < Fs.size(); ++i)
+ Gs += pointwise_multiply(Fs[i],conj(As[i]));
+ Gs = pointwise_multiply(Gs, reciprocal(Bs+get_regularizer_scale()));
+ ifft_inplace(Gs);
+ const double pos = max_point_interpolated(real(Gs)).y();
+
+ // update the rectangle's scale
+ position *= std::pow(get_scale_pyramid_alpha(), pos-(double)get_num_scale_levels()/2);
+
+
+
+ // Now update the scale filters
+ make_scale_target_location_image(pos, Gs);
+ Bs *= (1-get_nu_scale());
+ for (unsigned long i = 0; i < Fs.size(); ++i)
+ {
+ As[i] = get_nu_scale()*pointwise_multiply(Gs, Fs[i]) + (1-get_nu_scale())*As[i];
+ Bs += get_nu_scale()*(squared(real(Fs[i]))+squared(imag(Fs[i])));
+ }
+
+
+ return psr;
+ }
+
+ template <typename image_type>
+ double update_noscale (
+ const image_type& img
+ )
+ {
+ return update_noscale(img, get_position());
+ }
+
+ template <typename image_type>
+ double update(
+ const image_type& img
+ )
+ {
+ return update(img, get_position());
+ }
+
+ private:
+
+ template <typename image_type>
+ void make_scale_space(
+ const image_type& img,
+ std::vector<matrix<std::complex<double>,0,1> >& Fs
+ ) const
+ {
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+
+ // Make an image pyramid and put it into the chips array.
+ const long chip_size = get_scale_window_size();
+ drectangle ppp = position*std::pow(get_scale_pyramid_alpha(), -(double)get_num_scale_levels()/2);
+ dlib::array<array2d<pixel_type> > chips;
+ std::vector<dlib::vector<double,2> > from_points, to_points;
+ from_points.push_back(point(0,0));
+ from_points.push_back(point(chip_size-1,0));
+ from_points.push_back(point(chip_size-1,chip_size-1));
+ for (unsigned long i = 0; i < get_num_scale_levels(); ++i)
+ {
+ array2d<pixel_type> chip(chip_size,chip_size);
+
+ // pull box into chip
+ to_points.clear();
+ to_points.push_back(ppp.tl_corner());
+ to_points.push_back(ppp.tr_corner());
+ to_points.push_back(ppp.br_corner());
+ transform_image(img,chip,interpolate_bilinear(),find_affine_transform(from_points, to_points));
+
+ chips.push_back(chip);
+ ppp *= get_scale_pyramid_alpha();
+ }
+
+
+ // extract HOG for each chip
+ dlib::array<dlib::array<array2d<float> > > hogs(chips.size());
+ for (unsigned long i = 0; i < chips.size(); ++i)
+ {
+ extract_fhog_features(chips[i], hogs[i], 4);
+ hogs[i].resize(32);
+ assign_image(hogs[i][31], chips[i]);
+ assign_image(hogs[i][31], mat(hogs[i][31])/255.0);
+ }
+
+ // Now copy the hog features into the Fs outputs and also apply the cosine
+ // windowing.
+ Fs.resize(hogs[0].size()*hogs[0][0].size());
+ unsigned long i = 0;
+ for (long r = 0; r < hogs[0][0].nr(); ++r)
+ {
+ for (long c = 0; c < hogs[0][0].nc(); ++c)
+ {
+ for (unsigned long j = 0; j < hogs[0].size(); ++j)
+ {
+ Fs[i].set_size(hogs.size());
+ for (unsigned long k = 0; k < hogs.size(); ++k)
+ {
+ Fs[i](k) = hogs[k][j][r][c]*scale_cos_mask[k];
+ }
+ ++i;
+ }
+ }
+ }
+ }
+
+ template <typename image_type>
+ point_transform_affine make_chip (
+ const image_type& img,
+ drectangle p,
+ std::vector<matrix<std::complex<double> > >& chip
+ ) const
+ {
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+ array2d<pixel_type> temp;
+ const double padding = 1.4;
+ const chip_details details(p*padding, chip_dims(get_filter_size(), get_filter_size()));
+ extract_image_chip(img, details, temp);
+
+
+ chip.resize(32);
+ dlib::array<array2d<float> > hog;
+            extract_fhog_features(temp, hog, 1, 3, 3);
+ for (unsigned long i = 0; i < hog.size(); ++i)
+ assign_image(chip[i], pointwise_multiply(matrix_cast<double>(mat(hog[i])), mask));
+
+ assign_image(chip[31], temp);
+ assign_image(chip[31], pointwise_multiply(mat(chip[31]), mask)/255.0);
+
+ return inv(get_mapping_to_chip(details));
+ }
+
+ void make_target_location_image (
+ const dlib::vector<double,2>& p,
+ matrix<std::complex<double> >& g
+ ) const
+ {
+ g.set_size(get_filter_size(), get_filter_size());
+ g = 0;
+ rectangle area = centered_rect(p, 21,21).intersect(get_rect(g));
+ for (long r = area.top(); r <= area.bottom(); ++r)
+ {
+ for (long c = area.left(); c <= area.right(); ++c)
+ {
+ double dist = length(point(c,r)-p);
+ g(r,c) = std::exp(-dist/3.0);
+ }
+ }
+ fft_inplace(g);
+ g = conj(g);
+ }
+
+
+ void make_scale_target_location_image (
+ const double scale,
+ matrix<std::complex<double>,0,1>& g
+ ) const
+ {
+ g.set_size(get_num_scale_levels());
+ for (long i = 0; i < g.size(); ++i)
+ {
+ double dist = std::pow((i-scale),2.0);
+ g(i) = std::exp(-dist/1.000);
+ }
+ fft_inplace(g);
+ g = conj(g);
+ }
+
+ matrix<double> make_cosine_mask (
+ ) const
+ {
+ const long size = get_filter_size();
+ matrix<double> temp(size,size);
+ point cent = center(get_rect(temp));
+ for (long r = 0; r < temp.nr(); ++r)
+ {
+ for (long c = 0; c < temp.nc(); ++c)
+ {
+ point delta = point(c,r)-cent;
+ double dist = length(delta)/(size/2.0)*(pi/2);
+ dist = std::min(dist*1.0, pi/2);
+
+ temp(r,c) = std::cos(dist);
+ }
+ }
+ return temp;
+ }
+
+
+ std::vector<matrix<std::complex<double> > > A, F;
+ matrix<double> B;
+
+ std::vector<matrix<std::complex<double>,0,1> > As, Fs;
+ matrix<double,0,1> Bs;
+ drectangle position;
+
+ matrix<double> mask;
+ std::vector<double> scale_cos_mask;
+
+        // G and Gs do not logically contribute to the state of this object. They are
+        // here just so we can avoid reallocating them over and over.
+ matrix<std::complex<double> > G;
+ matrix<std::complex<double>,0,1> Gs;
+
+ unsigned long filter_size;
+ unsigned long num_scale_levels;
+ unsigned long scale_window_size;
+ double regularizer_space;
+ double nu_space;
+ double regularizer_scale;
+ double nu_scale;
+ double scale_pyramid_alpha;
+ };
+}
+
+#endif // DLIB_CORRELATION_TrACKER_H_
+
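In update(), the scale correction is multiplicative: position is scaled by pow(scale_pyramid_alpha, pos - num_scale_levels/2), so a peak found k levels above the middle of the scale pyramid grows the box by alpha^k. A tiny sketch of that arithmetic, not part of the patch, with an invented peak location:

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const double alpha = 1.020;     // default scale_pyramid_alpha
        const int num_levels = 1 << 5;  // 32 levels with the default num_scale_levels
        const double peak = 20.0;       // hypothetical interpolated peak from Gs

        // Same update as correlation_tracker::update(): four levels above the
        // middle of the pyramid enlarges the box by 1.02^4 ~= 1.082 (about 8%).
        std::printf("scale factor: %f\n", std::pow(alpha, peak - num_levels/2.0));
        return 0;
    }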
diff --git a/ml/dlib/dlib/image_processing/correlation_tracker_abstract.h b/ml/dlib/dlib/image_processing/correlation_tracker_abstract.h
new file mode 100644
index 000000000..5514f5e76
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/correlation_tracker_abstract.h
@@ -0,0 +1,162 @@
+// Copyright (C) 2015 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#undef DLIB_CORRELATION_TrACKER_ABSTRACT_H_
+#ifdef DLIB_CORRELATION_TrACKER_ABSTRACT_H_
+
+#include "../geometry/drectangle_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class correlation_tracker
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This is a tool for tracking moving objects in a video stream. You give it
+ the bounding box of an object in the first frame and it attempts to track the
+ object in the box from frame to frame.
+
+ This tool is an implementation of the method described in the following paper:
+ Danelljan, Martin, et al. "Accurate scale estimation for robust visual
+ tracking." Proceedings of the British Machine Vision Conference BMVC. 2014.
+ !*/
+
+ public:
+
+ explicit correlation_tracker (unsigned long filter_size = 6,
+ unsigned long num_scale_levels = 5,
+ unsigned long scale_window_size = 23,
+ double regularizer_space = 0.001,
+ double nu_space = 0.025,
+ double regularizer_scale = 0.001,
+ double nu_scale = 0.025,
+ double scale_pyramid_alpha = 1.020
+ );
+ /*!
+            ensures
+                - Initializes the correlation_tracker. Higher values of filter_size and
+                  num_scale_levels increase tracking precision but require more CPU
+                  for processing. Recommended values: filter_size = 5-7 (default 6),
+                  num_scale_levels = 4-6 (default 5).
+ - #get_position().is_empty() == true
+ !*/
+
+ template <
+ typename image_type
+ >
+ void start_track (
+ const image_type& img,
+ const drectangle& p
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - p.is_empty() == false
+ ensures
+ - This object will start tracking the thing inside the bounding box in the
+ given image. That is, if you call update() with subsequent video frames
+ then it will try to keep track of the position of the object inside p.
+ - #get_position() == p
+ !*/
+
+ drectangle get_position (
+ ) const;
+ /*!
+ ensures
+ - returns the predicted position of the object under track.
+ !*/
+
+ template <
+ typename image_type
+ >
+ double update_noscale (
+ const image_type& img,
+ const drectangle& guess
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - get_position().is_empty() == false
+ (i.e. you must have started tracking by calling start_track())
+ ensures
+ - When searching for the object in img, we search in the area around the
+              provided guess. This function only tracks the object's position; it
+              does not try to track changes in scale.
+            - #get_position() == the new predicted location of the object in img. This
+              location will be a copy of guess that has been translated, but not
+              scaled, based on the content of img so that it, hopefully, bounds
+              the object in img.
+ - Returns the peak to side-lobe ratio. This is a number that measures how
+ confident the tracker is that the object is inside #get_position().
+ Larger values indicate higher confidence.
+ !*/
+
+ template <
+ typename image_type
+ >
+ double update (
+ const image_type& img,
+ const drectangle& guess
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - get_position().is_empty() == false
+ (i.e. you must have started tracking by calling start_track())
+ ensures
+ - When searching for the object in img, we search in the area around the
+ provided guess.
+ - #get_position() == the new predicted location of the object in img. This
+ location will be a copy of guess that has been translated and scaled
+ appropriately based on the content of img so that it, hopefully, bounds
+ the object in img.
+ - Returns the peak to side-lobe ratio. This is a number that measures how
+ confident the tracker is that the object is inside #get_position().
+ Larger values indicate higher confidence.
+ !*/
+
+ template <
+ typename image_type
+ >
+ double update_noscale (
+ const image_type& img
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - get_position().is_empty() == false
+ (i.e. you must have started tracking by calling start_track())
+ ensures
+ - performs: return update_noscale(img, get_position())
+ !*/
+
+        template <
+ typename image_type
+ >
+ double update (
+ const image_type& img
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - get_position().is_empty() == false
+ (i.e. you must have started tracking by calling start_track())
+ ensures
+ - performs: return update(img, get_position())
+ !*/
+
+ };
+}
+
+#endif // DLIB_CORRELATION_TrACKER_ABSTRACT_H_
+
+
+
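The interface above implies the usual tracking loop: call start_track() once on the first frame, then one update() per subsequent frame, reading get_position() (and the returned peak-to-sidelobe ratio as a confidence score) after each call. A minimal sketch with placeholder file names and an invented initial box:

    #include <dlib/image_processing.h>
    #include <dlib/image_io.h>
    #include <iostream>
    #include <string>

    int main()
    {
        using namespace dlib;
        correlation_tracker tracker;

        array2d<unsigned char> frame;
        load_image(frame, "frame0.png");                      // placeholder path
        tracker.start_track(frame, drectangle(100,100, 180,200));

        for (int i = 1; i < 10; ++i)                          // invented frame count
        {
            load_image(frame, "frame" + std::to_string(i) + ".png");
            const double psr = tracker.update(frame);         // peak-to-sidelobe ratio
            std::cout << "psr: " << psr
                      << "  box: " << tracker.get_position() << std::endl;
        }
        return 0;
    }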
diff --git a/ml/dlib/dlib/image_processing/detection_template_tools.h b/ml/dlib/dlib/image_processing/detection_template_tools.h
new file mode 100644
index 000000000..b22c109fe
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/detection_template_tools.h
@@ -0,0 +1,113 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#ifndef DLIB_DETECTION_TEMPlATE_TOOLS_Hh_
+#define DLIB_DETECTION_TEMPlATE_TOOLS_Hh_
+
+#include "detection_template_tools_abstract.h"
+#include "../geometry.h"
+#include "../matrix.h"
+#include <utility>
+#include <vector>
+#include <cmath>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ inline rectangle compute_box_dimensions (
+ const double width_to_height_ratio,
+ const double area
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(width_to_height_ratio > 0 && area > 0,
+ "\t rectangle compute_box_dimensions()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t width_to_height_ratio: " << width_to_height_ratio
+ << "\n\t area: " << area
+ );
+
+ /*
+ width*height == area
+ width/height == width_to_height_ratio
+ */
+ using namespace std;
+
+ const int height = (int)std::floor(std::sqrt(area/width_to_height_ratio) + 0.5);
+ const int width = (int)std::floor(area/height + 0.5);
+
+ return centered_rect(0,0,width,height);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<rectangle> create_single_box_detection_template (
+ const rectangle& object_box
+ )
+ {
+ std::vector<rectangle> temp;
+ temp.push_back(object_box);
+ return temp;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<rectangle> create_overlapped_2x2_detection_template (
+ const rectangle& object_box
+ )
+ {
+ std::vector<rectangle> result;
+
+ const point c = center(object_box);
+
+ result.push_back(rectangle() + c + object_box.tl_corner() + object_box.tr_corner());
+ result.push_back(rectangle() + c + object_box.bl_corner() + object_box.br_corner());
+ result.push_back(rectangle() + c + object_box.tl_corner() + object_box.bl_corner());
+ result.push_back(rectangle() + c + object_box.tr_corner() + object_box.br_corner());
+
+ return result;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<rectangle> create_grid_detection_template (
+ const rectangle& object_box,
+ unsigned int cells_x,
+ unsigned int cells_y
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(cells_x > 0 && cells_y > 0,
+ "\t std::vector<rectangle> create_grid_detection_template()"
+ << "\n\t The number of cells along a dimension can't be zero. "
+ << "\n\t cells_x: " << cells_x
+ << "\n\t cells_y: " << cells_y
+ );
+
+ std::vector<rectangle> result;
+
+ const matrix<double,1> x = linspace(object_box.left(), object_box.right(), cells_x+1);
+ const matrix<double,1> y = linspace(object_box.top(), object_box.bottom(), cells_y+1);
+
+ for (long j = 0; j+1 < y.size(); ++j)
+ {
+ for (long i = 0; i+1 < x.size(); ++i)
+ {
+ const dlib::vector<double,2> tl(x(i),y(j));
+ const dlib::vector<double,2> br(x(i+1),y(j+1));
+ result.push_back(rectangle(tl,br));
+ }
+ }
+
+ return result;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+
+#endif // DLIB_DETECTION_TEMPlATE_TOOLS_Hh_
+
+
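create_grid_detection_template() slices object_box along linspace()-spaced boundaries, so the returned cells tile the box up to integer rounding. A quick sketch, not part of the patch, that prints the 2x2 tiling of an arbitrary 100x100 box:

    #include <dlib/image_processing.h>
    #include <iostream>

    int main()
    {
        using namespace dlib;
        const rectangle box(0,0, 99,99);

        // Four cells of roughly 50x50 each, all contained inside box.
        const std::vector<rectangle> cells = create_grid_detection_template(box, 2, 2);
        for (unsigned long i = 0; i < cells.size(); ++i)
            std::cout << cells[i] << "\n";
        return 0;
    }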
diff --git a/ml/dlib/dlib/image_processing/detection_template_tools_abstract.h b/ml/dlib/dlib/image_processing/detection_template_tools_abstract.h
new file mode 100644
index 000000000..30b0ad5b9
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/detection_template_tools_abstract.h
@@ -0,0 +1,95 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#undef DLIB_DETECTION_TEMPlATE_TOOLS_ABSTRACT_Hh_
+#ifdef DLIB_DETECTION_TEMPlATE_TOOLS_ABSTRACT_Hh_
+
+#include "../geometry.h"
+#include <utility>
+#include <vector>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ rectangle compute_box_dimensions (
+ const double width_to_height_ratio,
+ const double area
+ );
+ /*!
+ requires
+ - area > 0
+ - width_to_height_ratio > 0
+ ensures
+ - returns a rectangle with the given area and width_to_height_ratio.
+ - In particular, returns a rectangle R such that:
+ - R.area() == area (to within integer precision)
+ - R.width()/R.height() == width_to_height_ratio (to within integer precision)
+ - center(R) == point(0,0)
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ std::vector<rectangle> create_single_box_detection_template (
+ const rectangle& object_box
+ );
+ /*!
+ ensures
+ - returns a vector that contains only object_box.
+ - In particular, returns a vector V such that:
+ - V.size() == 1
+ - V[0] == object_box
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ std::vector<rectangle> create_overlapped_2x2_detection_template (
+ const rectangle& object_box
+ );
+ /*!
+ ensures
+ - Divides object_box up into four overlapping regions, the
+ top half, bottom half, left half, and right half. These
+ four rectangles are returned inside a std::vector.
+ - In particular, returns a vector V such that:
+ - V.size() == 4
+ - V[0] == top half of object_box
+ - V[1] == bottom half of object_box
+ - V[2] == left half of object_box
+ - V[3] == right half of object_box
+ - for all valid i: object_box.contains(V[i]) == true
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ std::vector<rectangle> create_grid_detection_template (
+ const rectangle& object_box,
+ unsigned int cells_x,
+ unsigned int cells_y
+ );
+ /*!
+ requires
+ - cells_x > 0
+ - cells_y > 0
+ ensures
+ - Divides object_box up into a grid and returns a vector
+ containing all the rectangles corresponding to elements
+ of the grid. Moreover, the grid will be cells_x elements
+ wide and cells_y elements tall.
+ - In particular, returns a vector V such that:
+ - V.size() == cells_x*cells_y
+ - for all valid i:
+ - object_box.contains(V[i]) == true
+ - V[i] == The rectangle corresponding to the ith grid
+ element.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+
+#endif // DLIB_DETECTION_TEMPlATE_TOOLS_ABSTRACT_Hh_
+
+
+
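The next file inlines the serialized detector data behind get_frontal_face_detector(), so face detection needs no external model file. A minimal usage sketch; the image path is a placeholder:

    #include <dlib/image_processing/frontal_face_detector.h>
    #include <dlib/image_io.h>
    #include <iostream>

    int main()
    {
        using namespace dlib;
        frontal_face_detector detector = get_frontal_face_detector();

        array2d<unsigned char> img;
        load_image(img, "faces.jpg");                  // placeholder path

        // One rectangle per detected face.
        const std::vector<rectangle> faces = detector(img);
        std::cout << "faces found: " << faces.size() << std::endl;
        return 0;
    }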
diff --git a/ml/dlib/dlib/image_processing/frontal_face_detector.h b/ml/dlib/dlib/image_processing/frontal_face_detector.h
new file mode 100644
index 000000000..3f4b59769
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/frontal_face_detector.h
@@ -0,0 +1,2373 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License. See LICENSE.txt for the full license.
+#ifndef DLIB_FRONTAL_FACE_DETECTOr_Hh_
+#define DLIB_FRONTAL_FACE_DETECTOr_Hh_
+
+#include "frontal_face_detector_abstract.h"
+#include "../image_processing/object_detector.h"
+#include "../image_processing/scan_fhog_pyramid.h"
+#include <sstream>
+#include "../compress_stream.h"
+#include "../base64.h"
+
+namespace dlib
+{
+ typedef object_detector<scan_fhog_pyramid<pyramid_down<6> > > frontal_face_detector;
+ inline const std::string get_serialized_frontal_faces();
+
+ inline frontal_face_detector get_frontal_face_detector()
+ {
+ std::istringstream sin(get_serialized_frontal_faces());
+ frontal_face_detector detector;
+ deserialize(detector, sin);
+ return detector;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ /*
+        It is built out of 5 HOG filters: a front looking, a left looking, a right
+        looking, a front looking but rotated left, and a front looking but rotated
+        right one.
+
+ Moreover, here is the training log and parameters used to generate the filters:
+ The front detector:
+ trained on mirrored set of labeled_faces_in_the_wild/frontal_faces.xml
+ upsampled each image by 2:1
+ used pyramid_down<6>
+ loss per missed target: 1
+ epsilon: 0.05
+ padding: 0
+ detection window size: 80 80
+ C: 700
+ nuclear norm regularizer: 9
+ cell_size: 8
+ num filters: 78
+ num images: 4748
+ Train detector (precision,recall,AP): 0.999793 0.895517 0.895368
+ singular value threshold: 0.15
+
+ The left detector:
+ trained on labeled_faces_in_the_wild/left_faces.xml
+ upsampled each image by 2:1
+ used pyramid_down<6>
+ loss per missed target: 2
+ epsilon: 0.05
+ padding: 0
+ detection window size: 80 80
+ C: 250
+ nuclear norm regularizer: 8
+ cell_size: 8
+ num filters: 63
+ num images: 493
+ Train detector (precision,recall,AP): 0.991803 0.86019 0.859486
+ singular value threshold: 0.15
+
+ The right detector:
+ trained left-right flip of labeled_faces_in_the_wild/left_faces.xml
+ upsampled each image by 2:1
+ used pyramid_down<6>
+ loss per missed target: 2
+ epsilon: 0.05
+ padding: 0
+ detection window size: 80 80
+ C: 250
+ nuclear norm regularizer: 8
+ cell_size: 8
+ num filters: 66
+ num images: 493
+ Train detector (precision,recall,AP): 0.991781 0.85782 0.857341
+ singular value threshold: 0.19
+
+ The front-rotate-left detector:
+ trained on mirrored set of labeled_faces_in_the_wild/frontal_faces.xml
+ upsampled each image by 2:1
+ used pyramid_down<6>
+ rotated left 27 degrees
+ loss per missed target: 1
+ epsilon: 0.05
+ padding: 0
+ detection window size: 80 80
+ C: 700
+ nuclear norm regularizer: 9
+ cell_size: 8
+ num images: 4748
+ singular value threshold: 0.12
+
+ The front-rotate-right detector:
+ trained on mirrored set of labeled_faces_in_the_wild/frontal_faces.xml
+ upsampled each image by 2:1
+ used pyramid_down<6>
+ rotated right 27 degrees
+ loss per missed target: 1
+ epsilon: 0.05
+ padding: 0
+ detection window size: 80 80
+ C: 700
+ nuclear norm regularizer: 9
+ cell_size: 8
+ num filters: 89
+ num images: 4748
+ Train detector (precision,recall,AP): 1 0.897369 0.897369
+ singular value threshold: 0.15
+ */
+ inline const std::string get_serialized_frontal_faces()
+ {
+ dlib::base64 base64_coder;
+ dlib::compress_stream::kernel_1ea compressor;
+ std::ostringstream sout;
+ std::istringstream sin;
+
+        // The base64 encoded data from the file 'object_detector.dat' that we want to decode and return.
+ sout << "AW2B5ZIvv09mlKLVYjKqbJC05yeR2KsCpPGEGOgn2QlwM92S4UT4HgQkV0V9WqYRf6xETTSVKz7Z";
+ sout << "YcJ84Jc4C3+VdPgZDhV+LDt6qAt3OI4nA9zN4Y9cCIb6ivlETkN/JMmapbOAUW2mrSzDif5zjAaq";
+ sout << "+NFvw/5V0Jciopw9tR6nYtV41unWGvyyfsO9CcqvDy81QIydToHh0a7UaL0jCtA2DYzkViDufxyv";
+ sout << "Kpsn4xMyiU0haM1ge3UktIO48io/gSzjEKu0YYAffbD2YO1IE34tUH15Z3Z9NjkBFxTytDgrMxk8";
+ sout << "i9MYq+Nl9nS421aogmec3ugExJYjLZMHs4KAk71jvG8vtJyJEA3qyLY6lvONt98gzQwGQ9+2B6de";
+ sout << "ocb/DDJUza6mvudHQNJBYraR4gCWcIn9gFu2rJiRHf4IiqP4GEB3B1zKiHfJRo9jZbhxQUitAxAx";
+ sout << "U/E2SuuHGZDilqK9AJ4K41RAudraxF9li/Bs4f+CK3G8Z/c97P7WLVekJL2ws+MsCdL9ObHE5ePD";
+ sout << "uLLQWBy5NUbgPVM6HEnhnOiZk3rA4DYNqbABy3uemablAln9BLGkk4wrm2UcicacnzY8Aq054Ttb";
+ sout << "3CCTcG4SOSPfePl/7T1M6Uy1hOesp5MpXfUR8gBKr4466dbdXCDHSahI05gra6NzxkOpOo2mOqBg";
+ sout << "LYNGZUkHK4tdRyyD12N1MH+nJiMJbgk+qj54t5i3AuEr/71HTRXoTT8AEYbvc9y4f2WAlliQYXPn";
+ sout << "O2Uaza3lKYrH7mFjKMNhLfvrezy9fe+1asbSlRKelnU3eY4lhD6fTVJjXqZypBfMnfmGQQJ0Q7g5";
+ sout << "1Z/9GzpRyZnPSzQljtJgzVp8Gk0z3fuKiXPO9g+s4XL2cEuxBOFij0KGTy4eNitM0gcPc6xzp3tz";
+ sout << "6Wv0W2h7w4h+V8Bzvyn8ag1sbEO0G1Lf2BrDVM9+pNxFoWFxYHqdoOmJPVvb8PRQqoC5bkqhplFr";
+ sout << "TR5l3XsQedgwsnkadxNZQ3MbRJyo0JU0kvV1cfphLcn24MIIKqAnw3daXqbJaba+oCUep5GTuzI7";
+ sout << "nad7ykHNN0iFkgYXMmXJl+F5TsS8y+izuHlXAX6wX1qRVzWJwCpM5oVVG/5eYTzg0J9C1bCcNyHL";
+ sout << "2w5TJFYrD8bq3O+Y3fiO5LJ8F5/vsu2EBUMi1+eP1WfsTwd6N9jFtF5gA5sHX3zI925aDqVx9byr";
+ sout << "j4X5yr68p5P6f8wSLL8jzW8i4a0yP3zXlqN6QQDY1ssfNsMf43tOTtmbBlmxviL2egs4gvadD7Gd";
+ sout << "fRNowL71P3mkqRmnrnihlI01NbDl+Trzsh3EOn43PRC9nl8yo+fYVH8GqS8JGy1xOw4G479vOifI";
+ sout << "9GC4BGnSDJdKgSnBwI1AJQ2TT8EZ//56lkRlgusg25TwC7uQ1zreeL6baYdgfXSggx3ULdNDGl5o";
+ sout << "ftRK9LDaop6XvB6I0ITsLYvAoGP/5sHfttDj6HlQW/LlzkSPmzY/FtV6h6bE+k1gG7BANrQjwOW5";
+ sout << "sfHNYadD1v4zIFdt2su3docGbGP/iDMvM+BmYIBP86zX5eIlTYwDmxXht95T6GCCjS/XuMMy12hd";
+ sout << "Fdb6lm1O42ieM4KQ/2EOFy3Ij+YOIapzYA6p6Jz9dtINpCojgUHyo6xc4HTNnEKRy+YN+awhb1l2";
+ sout << "FJdy2/QI3xGVNNTnWcQrsvjGZb/Z3VaZUltrIbnCeEZOeOCM0TxkBEhqFfI3qwMx8PUj+imUlTDM";
+ sout << "7N+p5sxmKLliHHovOO32ajBTKUSI9IMQzf3QY6dZDts4JkMYQ1xc6lpm679s1KMVVrWuOqiAU5Vs";
+ sout << "qehfnl+oMRngi0G0BnMne45CjU5RECvhg+Vkkxx0kAp38+9pY3XiO/DuyIxpOSPip2o0+9rZLF1Z";
+ sout << "cAUGnG85CFEXl96wpxvqVlIULUV2+pNJxdU+q1MkCsxDeXrvfjhEAJpPE38dUb3t4blsNUZ3wJ2w";
+ sout << "s6cXe0nEPWNkZlmEsXcFpw5zHe0Gd7YpXigz7Z+IVhvplpv686TJiLTpVPW2T1uJvSmMuG/FqvT5";
+ sout << "JIIMg2of1ydicw5EbWrqhIUzllX3l0u00gFziPmKAioiqCxjWojd9l3Q0Q6IsaZAH+WzV2xFabbY";
+ sout << "4b8SwoFvhe4qnUQLFdOSTbzeDIKP9B8bSiQwbjUBg3jYEWUrMz+eR9lpGu8603vChIEXaTxyMrO5";
+ sout << "SCeaVOgPE77potDoSUV1hsoW7ZqGCFH+AGyVTohitS0iqZbIxC7+7rnVP8XfXw5YpSajF94z2TSd";
+ sout << "jW0KpmuCZ88DTCPFamf5zh917qp/PzQOGTdalr+Ov+ogvrJraDnoE+ONWrdHqBm7Adgn8/wy5vzX";
+ sout << "fNu1AT14eYrEmWmXvt6JDAbBYqP8Aw8b1QRZff11MblUh0IpztedWhifGy/RFJUN0/e66Mh0cKeF";
+ sout << "plmK6NqchTzOQMKJVq9jxdyurcjcA0uu4dVJ1XXkAtxBim2J2m0zcwX/+HcRe9VbeNehmDbUC49o";
+ sout << "ktNvrwbbB1IUV/c0MNCruV359DVINXskQTK12g2X5qprOLW+YPO6CnTFpJRsiFBoLllF1sUTjROH";
+ sout << "SrHHRYp3W5t5gqfT4afBxmtTmpJEG0oG4eNfMhxEhQ7HjoVhahOM6px9Be9S+4ca/w+zII7NnUkY";
+ sout << "Iaas+FW7vhOIDOiV82SpJqBjdY9eIP//XGR1DFQKI5cLKmT2/DF8tB9XcqTgmVWNMVt9Xw21CaeR";
+ sout << "eYeoWvLHlm8o7ahtJCSQ0iHypTZMA16wdJ5IJD5WoYd50rUn58RBa9sTXT/t/KhxJfG5OWXl55eq";
+ sout << "abYojSlluFyvFSk7Z/wu/EqFUEBD8r4OIrlJCMZl6kKy4EncmjUrb3mG6aDKxsaRBRBkRRya9t77";
+ sout << "epMG60v3MRCcY9E+n9sXAOUpf+ErN7iD6FY4XFpq5R0Z+6MiLRE0af/JQ9R42quTl8CLH7609DDd";
+ sout << "s8+8bKA2zjvSJhWbwGURRCW8SK9tNKuemwkt3Eutm+xMJemP2JIVFVXYxjCvDmxIIODneu1vmcSy";
+ sout << "XadKkyjtYDwacddFAqGh0kLqHX9i/WoedVKC1Vuup+AYPkyZ1lPraGVqjq0nsiwp/vxm9c/+4/wS";
+ sout << "hW99Q+zoAZ0IWWeYAqcXGdZqvd58gx0/fmU/Pq4FqtCdJ2qnoUDMvjZeyWE7lA/Xf7AdLcz4XHNz";
+ sout << "VAidxMj7/K8p3KdK+XqED94Ey1WzpUQ2mH+10Zq/6jebtoYJlht9meMsjvjWxg4nwFIZY1QAMZPV";
+ sout << "phcmEwrLA+Z/Xjo+FEq4hKD8pIriQi4xT4uAoPzOFGp/ziwBbAb/EfYsspnVxpnERKblbDsV9bFK";
+ sout << "df5VSgeqg7p2auZBk/WkX/wOeXkulbiJA5lXTsgInJGoREJ+uaudFadnLD1pmjqq+VFJW6XOT++I";
+ sout << "arRHT1sYJY5mhqFztTeUGH5VXZNtRGl1nWpewvmgyK6T5XLUcuZqsyZVtzkkQ0eSR4h+nBuRAQZK";
+ sout << "EmqzcPrRKObVC7Xv+kMcnM+M2+zCuZoUSO/zt7OOXNt/B51oQ2DRqthPxgzUrWOvoOgZleeayImR";
+ sout << "rqG2QnkA8+Kb/mhxJ+SAqOjsIJATfLc13SzVIKVumz+uX5jUiZXWfWF/e1cdS1w3Nf+dNinnGQ2v";
+ sout << "Vf2SxiRlTfDTZTZXcLlT8VrCOP4UYvg0QrzBqU2myM41lZDUS2X/WOzvrNrRpEoCS6/OcbvMj5gf";
+ sout << "dzoZ1oqvaL3dosQ9/QAwI7wPC2/QQTRyDIbl4EuhcX/ebyueLqlxKRLPrmq/mE2YU6aU7cf+t8PM";
+ sout << "LX7J0eyNGl00TEWN0R7ui2xlfdnLfmILs+lNNthYvmUbtbncoqx0sCWgjk1Iqagp9uFlWA+6vMa6";
+ sout << "nx7Qg0Jz+Qn2u4iZpyGDZKUWmHgDYhRcKfsnjnzbztyNms4tEnmwtIeLwDqFPlC4BKefz9gja+tt";
+ sout << "0Om7TAtwcmWHve1ENSOQSKTLNvVwhjRLgmfK9SFUUjKeVSMp1g27Rf2WCoDyBXhRauHpqCdj7GH4";
+ sout << "AmlI3EUHSjS+/1ZoT/2DnHwuN+GKVh0d8k/7sGrln94r3JuxEPvfyQFvPSRlFkWYyKPdxb+H37L7";
+ sout << "DCYzgf43vxZ82JGYVXB7QBpO0VeEXiQcIgXV7uOsGKiXfFUwueL4kdNknk2hAfrFFQcpoBiQSz89";
+ sout << "sNRT1tH5Ipbuf19R66cogiaAesXnm42jLjQpnkMwd3S4F6P8zDL7m13u6ahcrWvZiyDuBc/+Th2+";
+ sout << "Swex+BSv2TkUorR6nwVoozQEA16/MlZB1acIFSrD0kSr22Vubdbo05svEAZ7DKIdQjDu2wTTOOvL";
+ sout << "IVJOYPjdxXZytBv1jRIhUyyYBvRtaFsl57ZWAmvbFEXZXLihnrBskTqxrxNPqhg+bLxicTDlFyHI";
+ sout << "UIipzL4AvofdYWolB8RvFyom18/szC67Flr1OW4axZ+k5S7249Y64eqtU1hk98joIhOdWaWBHxkL";
+ sout << "nP+ooeHeEvB5hNRIA23Yxoh4zzsWUB1KKvg1XRzjt2CBQ2FPaCfHsOKf52aaj0W2FByC81rpryrm";
+ sout << "Ye51T5zP5/N5j7yA+a907774PwIS3eYYyJRUCSh0ywfQ8rgkbjBdf2rKa0alzokz7Kmo2Iswnid0";
+ sout << "WzpliQr9KaPwAk7hkLjprMjzdJIug3KOKVAgygXP7rkgETIfTfZRG49EdJOjlW8mlmHZsO+arTFW";
+ sout << "vj0FgJCAQrrX0X9BOQ0MPu0friAGK0TNGsFs17lcHjaRNHXz3v6dY/MSR2TY82iyEkRofvNY/Xjr";
+ sout << "FRB2KM6Aq2pPpIjY4EuSQS5sU9ur5oxrKo68jNzoB9iRvmKhQq5HRSYKL5ACBF85HM2oWtyVl23y";
+ sout << "TqTW+jwNHfF+sc0FPS6xfwr3yvHi/OVlW046gnNLKOxO3RjGntaJeX8EVXGGpXDHLyle0UaZE0iG";
+ sout << "1xeLZjyKq5wRJ/Q1MPry/JJbfCXSIHeO2Uznqn7O5rcs5v3Z1PdlF7BfPUhnP7+Wcqryfi4xJ8rS";
+ sout << "BzyJkibOCzegvXnKTTw7q5/lrgh7LxfrY/4G6/Js8ibrUU9NGqBkOHUmxa9P7UPK43pz/bS7SWtl";
+ sout << "yA/3hBa0bv6hN0OeXVaBxtr8sMfS7FcvR3wtvmtKn4BlIYer3LMSvigPCK3K5seTPH3cx0J2uGzf";
+ sout << "SlPZus5idN8MnFCEiBUbs4W1M/BSw9EYA9rJyhDTyOYqKr6s1kagBUoVCXVlEPVgrJoppc4vLghu";
+ sout << "NMgUpcakhT8SAulssCjPb1UWPF92XPpn8/byK8dJoSFe1lfFb5Yog5YZMjgoKKbokk0n3eMlrbm2";
+ sout << "AGwIh0acdOXRR+lpeJQ240N/Waw3e+FhAI+AYfOkIXodtQcod08+F8uHCAAcd9dZvYyxZxNKjbCc";
+ sout << "aYTFUYPN53OZEwEyCIFWwPf0QhdhlpyAGCj9gqVU4N9b5FJYX2ZqVAl5JF4nl9yDWrJ3zmhwL4r1";
+ sout << "P8Pdv02ysNeZu76Y60+ffPXCqmjHjllu082gde9BXIEWdS1sd5qaH0qb8KRpV8WAYaM7/ccGTHQ+";
+ sout << "H+0C5o2904WS3MG8rR6LI6EqO2fcBnJzZ5BJX2bHv4kNHhQiW2tZjBlwKjuMH8Ddayd1BVqzjeuH";
+ sout << "5dfcL8xV4su36eRT/Vmanq/NZ80+KXsXZO1k88RIfQwwZdt5XribJfUSwzKGsKQrhu+8iUCjGP8l";
+ sout << "ScrIRdj3gjy2brM+zBr9z9pvFZR5NLjYN1Ko2BptMbEDxdjnYkYWix8BF1P+/PtSEJeGATIyl2al";
+ sout << "rAlEHX3ysdDjUic86ZNUx9c6N59ZcQkIr7IwFl6kc5sbuthroXmAnbW0A2UIO/LN6KFbbE53Up4Q";
+ sout << "KwoMeMHxlgEwundK+LV5WZ136K5JoA6SpvxzuKhCckg0Ev4+KtyA+1wlna6AHOQaj24BzblSd9k4";
+ sout << "2lWsVOwAOtGxFIRIxpou7S4yqPrvS93KPtVkrHDBqIveGcwoGfyw2ZSX+5o5SIZ5PUG3mFM/sNWw";
+ sout << "twketaHdV/ndITa3aJyGpqChs3hcwMOgODnpC+vjtY1D8zdp3pn4MBgb33jxc5kOCpDktiKGyaQN";
+ sout << "sQ7oaOy4aKmr4TFfWbrH3qeR9gz0utGL/iVHcgSlfl8rw4BFncc8HIB0SGJJhYE+lfEYpsP8H+1p";
+ sout << "pfG0yzIA752vcaOIWIGt+C/EvuXl5PP8qyE0aBe637yQd1aMyRhf46rsAIlhzwZ28wPYZ9KCaC41";
+ sout << "ap2+7/EJMw3HramAAo5OVqA6M5cV2V+MlGifSoVgTN+5TaY9EnqexQy2Gqw+9484Tv7QNVaEtwtY";
+ sout << "/O+aQ8nzc6H+clWCWJkWDvoqIrIqP4jFUaJ/FnqlPEb2GkPoNluJV92HqQj6fD2Iz0TQKCVQkVWq";
+ sout << "D/QuFVq8c+EC8Bz2j1cI0D30iwROmneb6XHTYVwn4yHkZ6LAoOz28fjT6dwJFdYo+Ci4Hhyl62tW";
+ sout << "P0al3X19i/IjH4Xi+ZH+lISFmA0oEJo4AG/oAklXtGtRtIwfKGIuIzqqEztmX9tY+INu7PtgH/FP";
+ sout << "z7d2f3CBZTZY4qTPMEPQ/th8jnjHrROZIM7Cej4v+zYms7NPlJ7x/k+eX5ISG7xEbWr8j+kr+R70";
+ sout << "bjGaz/rED73YxTMBmhQSKMDUjNaW/qclrQuvaNwUgM/VCtnY7NANztFMXhCa2hGjZaG/bp8Yc9IN";
+ sout << "T20nhrbTX+KPkcEmQjsHwyK8hT9XN6J+TD4iwdnb4A/KQI9JwaqpYPp0S1d99j0iqXlirvdPcotu";
+ sout << "AsUmNf1YOlK1I5KxaFA42emXXmg1vr7USuKiX62IslSjknRY0+bPxOcn09P0VK+HTTdLIZ+8p9+k";
+ sout << "fKsgY8ajl6qZ/LP5qbZ2KgHJHJNArwRSxNn4CR5ish97R+1A3DglEaWJ4vVuu4oaFIHc9eSgRMdJ";
+ sout << "IPJ52p+8SKIpjM3Tnig/Gw35R+sPcuZlpauFplYb3vcIoY7vN/f6+RKxPtWnuOfBh1iPJxJJfz+H";
+ sout << "MDVZihR471I+DLXgGrZ0fgMQZqVFelhF5eszKMOxB81TbRuPqUmneRijtWvR8QAySqzV3o+OoM/n";
+ sout << "fpoLxmcVQm3LanGF1VfCbk7X9dhocgWTpk9XjDzVjIjPceJ2IPuFjHcrNtu6L/fe/6sMqkKWNRQH";
+ sout << "8GGbrJU0/kqXIeYch5gXjiKFTIU/QIRt0e4YKlNV0Rpqkh8vY/X3OL8xbNCBd0bM4nCXMp5Ytwyw";
+ sout << "DEyjzBl4SvxgGSqG6ehAzY1LrZ3bHU0Bn/Q7vD6RIEr/WcxUvdr8oy1JuIey4PllgfcCaDdW1+wG";
+ sout << "YCz/81Acw7xOiG//gLZ+tApj+tGpMP3Z/vnC7bZmXAmXWCfZeWDwIcxX/V5Sco8G21PpzMYPM7k0";
+ sout << "1MEkR1PgNhpKv5he6criGZ6D/xVAfVJbxc7blkovBkLh109MFBCAGiA8zk3MAShzI7cynZVbyWGw";
+ sout << "+x5Bvl8/6xSUyG0MLFANqDilWFIEBpT8h6G6StX3WXoEfqJrO2pYMjQdqOg08AvXKWJg/xj4U2Mw";
+ sout << "m8+nK+zX8aXHC333WcQ+1eG918/0TEDoQAXep1atGq3wir0iBvurJHbOXffjGQalMd3AeFCLWaFn";
+ sout << "7tYSTWYcPnWwWuA47FxTSOPezm1PrihBIC7CyVjGHAGvtBdh2EjCVptJHYgft9Ivp0YpPFaGtT1c";
+ sout << "IsaiWl+dF+Yg0K4FVIpNqRq/g1EEpGni0mrTmlTKeeSiKzAXdGnjOZ/9woea73BFZkY0kAqMlrn4";
+ sout << "6AOIuXh9Af1UJe0OxzhcYKFjFuzj7Imjv0SgNaah5XePYFfLyqNUCctmTlFna9nZWZ4/Q/N0tqN1";
+ sout << "QJwMtZOdFdKoSwFcDrSuMBc2kKNCEgnXAB9azTyR6Frs6RDNbCOdmMKEIF0Ra6v4fqO/rzc+m8nM";
+ sout << "2GAyE9yBNQq1THcQSqlataFHDe9KkmlQ41F9hKifZEPJ2eMe4WbpMdXmjT0nNmxif9OiPMKR28EQ";
+ sout << "pcqtuJxTE2oQArxmoOD6uUSUpm+Xc190raj1/JA7kfFQPkONEkNn9fYRh7J9VvPk58RIkyDL3RfG";
+ sout << "SjlzXsvz0d2uU14U8ppyPSOUEgcvxUu9Zk/TcwZkWvQeJTPd/i7jbUyAHTPXy0secfKXWSoF4T1S";
+ sout << "AuuRuErtEIXmJm6bd3v6ozR+Vc494q5Nu80EGIEy+09XWaDi8E0ChYGPUn15jWmkw9aZ2SUGju+0";
+ sout << "OS7eaGTSBcbS7l3IaP+053oZvh6NN+iYo5Lb0rs+bog58fqpXLFLeaJFnHUmZipr4oX2EfpI3FuE";
+ sout << "1I7xjgdZiWMq57u9UId2PuNahTVN62Du790tZhGfoAACZxKx9xxi8nRwxz1Rh8uFosXdHJridfzF";
+ sout << "gzZhDTmxJjYCq7tg6769BcDtHxT2G+JOh2hMFV+aieGZEkBEfj6EWhuot2jR+VVjpLUhys154Fj1";
+ sout << "NLN0d0zMnJDThQlNGigIaHgVdQ+l/lNtN9ovAuVJRib/fYnSDRBpOQpOU5NuwyHjeHnYg20iKuBT";
+ sout << "ZWphFPD7M+zYlrVVH9Lg0AsaY5Yt0U7g+TXLuT/bi2tUz3rrrk/5bY7iLkGbEmOFZmxzXXqWdm8h";
+ sout << "ENOKVj/yrgSa/l93WqyOESSZk7hLMvP+OVkSj8qAKKBQ1+XyqVLODZZae0volbIcZe3HAAIjdYTm";
+ sout << "+JQfIGAWgkqcHwgv3WJiGPhq1WOVi4FSq2Dgxi5/J6cRg1Smsr9aCx0uNC2x362lI8Jd9yKn8m+m";
+ sout << "te+3Zx6sx0NCnYKxaWcH3V7BfF0hp0WQJ3vQbPG20PD/ACHvMEgmo2dDFit6m4yfWAQxHzQZE/3N";
+ sout << "E5TLT4EMnZxi00F6sV0G25nElE2t9CrGkLNxTUbK2sGKx+ybsveIWpoNtQty7hY8NF2KIICOd8QJ";
+ sout << "FsAKxGHbydI+9NV/8KyW2UID4JpoNJOQkh4B8pp/1bkBRPsikKLyowC6RWuWmVBm/DCPSIwkiV2A";
+ sout << "jNHVRmSoDO+U3eTMxbamjBV/H+xWgrBzBu+4aaFGH0MbKNtXG5COeCVMCtA5v9pmR65GLD/DYWcM";
+ sout << "JltMV+H82nUN8qDVTMpCSzrlkiv4Gmvh6b9HkxZC03g+IrBKAkXWkhIl3iYkIjLYNudFSUddDb6g";
+ sout << "/wHCk0lGJlbYim9VV0uRYJITZenRrzsMcb6g6Cm22cB9awV0qpixCGVW+jms3MfgcstzqdN36KPw";
+ sout << "C0IDdKjN2Bu9aNqHqWafK8Vl+oTYVU6foPJSOmHD3MhFHhuZk0oPtptRs/0aSZKH3FI+jz6KyTM0";
+ sout << "E9UDooIsxYAo7og8Ka1QCVel8cH4mmTBWTGLNNxuVwvHYgQc+j+QgKJ8DX3XzEHJQVL2fxCmm3i/";
+ sout << "tjJTltGK8o3S66OO3dN5g1KaYyxCDkmKjsGpyqAKGhdrzQzwLru4oof/b4cM6E/3aqGWH9pI27G2";
+ sout << "8jNYhu6r5LhYMpczurY9gssS94+RdUn4UFMt2zZSlpFsCY9E0NNaGwQ5sX0pcyk9r/FKWAWxT+e4";
+ sout << "b/buzfSIVsHWrzytkKOYCHMylaPd+juDOWX/Y1x5IBmR/VnpsIWbuYFjRlK7bvNoVcwitIZyI1Ku";
+ sout << "vmkH9u5YzndkbH9fj8FroFgMdZumIeRSFz2448yoIh/1+2wyEUXUvof32q5kktEutky9XtKCTIen";
+ sout << "LlWO9/7k0Kcz2Cp1S8bugmULKSLHEWMTtScZhEOl/o3jyMjLpbHhSfY5IHwZXVp6MO/bxpk4F4ur";
+ sout << "C2eAlsHUW0484VZFIm/GtgNRKq5H4MTRSmlzHxh0o5KnK87ZZNGKv2sGFoxhOKT8g9s8uz2ZfkI8";
+ sout << "HS0VWQ5y0Y9dY00ShJj9FShAuForC1EW8TBcgW2wjk7uN4CjXgupadGHC4hMFxVjJJ4tPj53PX+w";
+ sout << "KKTety47QKF0aeNAXeiNkzo0e/H8XYKYvyRKPpUhWbj5rzdkSev920dKjpq731kGRLUP8kljqmx4";
+ sout << "j/1ukvHqJrarb/U0LWECXe1mHUjehedJNCuDsXmlX3OIT4557z3W9vMbzKyu+0R+LN2YtYUFTdXA";
+ sout << "z442W3oqy7cJRIMfioDLTO4ry26sNo4uyq83j3iFx2iY4Wc41ZUGg9cwh5TVKg8XEh5US5xlsqVO";
+ sout << "kDR3XfYXA3GwuKklaNN6vImd+oP4g2ZYSl51f7tj5hd9xpTSRwIy3RJJ5VoTz+36jpT4Y3fnlppg";
+ sout << "GqBmWhJrY5UemTIoZbJ5X12NjQjW2HiKsuiCpLS9Wm0IXYWcRSYfiWYBLP+QZFyRA2VqVpwmY2X1";
+ sout << "EafYVxAjG2au8TnbfK+PLccuRg+kYNExJfD/hLUMyVg4wkLxP95L8CB85+g/1VomueeKJFnlrnkO";
+ sout << "ezCBls31aI/r4fMbdISFALkRwPav4rVwi8M67zuhxx/K97+5I4ONkaSU8/DI4SpqjfEIzl/y07Rg";
+ sout << "VUou00laGIhidjtfwENl18fyXGmjmLI/Mn+/H8gU1mW4Z0stSN/NkPYZjTx1AnvjG/LgaY1750yS";
+ sout << "4dk+ygLr07oWPhGB3BhIElS7VDxZnnPo2MFIPXTqWHqZ1/lNq8DE2EqgHgpFQGmp2MZVi060DA0Y";
+ sout << "En5g8zk1NXq0irzIv/hXYLbDEnL4ieulF+BlWN1oeERYelY8VkqgMtqGwBlwiO/qN488MVobHHAk";
+ sout << "VARDBpkSyX2bsF0KS4BCwybuQtNPVCaozYKWd8Q0RSNvsK72afBC+snd/y2KrFhcE4mE9ZhAwV7R";
+ sout << "LRR4IBmgNkDPDi7YXFEVZ4No5G5dJYL3yfsZy4b0kBEplbOoIjYxwz2dXYtX5Wc3hcKzRblKZG2i";
+ sout << "GkmTHabzN4BTwbGBxmCTbbyecAIO6MFJGlnxW6tQfdiQbcBbt1utUTpjVhZPVkGolN4VgU/qFPCj";
+ sout << "UyO9bO+RUapMvtwhI9+1KPcGiTbQsAX/V9+dSCjQIgD5sLRjfVQcKmK6/R0VSppo3ab0+XHDv55p";
+ sout << "FOPkhAKiKvI4Wl1JcKcsx8mwxCoTSchCxp5JhNn+WYBoINpTlmdRKI2hfXvfY+YXUzbATuTLKIZX";
+ sout << "IsHeRrzLNmntT4lzgHtEArSwEcYDRXLKBd+L13FZBV8iMX3ON8vUBMLU8QKoSDXatEI//h8RcI2R";
+ sout << "pOba7GU2f5TWFy5lB74tBKpcllmmid9w6jE2T3yhxU0E5GFWxWv64oSJDCfyD5GRfY7L2dOVBVwA";
+ sout << "H1DuC3NeBQfgaY+DPYFyC2gR6vEihtW5biK4HZoQkEHaBD8nREBdMlh8DcGuXwsTwEH1co2xFaNz";
+ sout << "53QpwalF61MYqPbQuFXZBvFEruliv3cYHUgIqtFo902pwFOK447zzj81l+5XzdVZHsA6dCGAjSqW";
+ sout << "J//PGJo3M48ERSqeURrEwNN6lD6nOqs9XAkQyGp0xLcv1/EVyzMoYTWazSaTkHbocIh58BOJVDya";
+ sout << "rjRytgcV9cAKzYvY9O3NBPvWMBSybUG0weBGTpWXNlydqxlAc7PBND1DfOL4XA6aDHpra2rRpJ/t";
+ sout << "xQJvaFWVNRYBOpR34GsrLpczGcf/z5hhR1gpE5y9//b26xf7V66n3kn0w2qGADZz4eL+Y7Wl1rIJ";
+ sout << "QXs4U95d6lfp26TVY7MsmQRf1GaO4keltA6LW8XkS9zXro/Ydl49AWToXe7suuJk6OGzaUqJImLB";
+ sout << "fI1w0xXDoVdNfY1SgepZyQxrW7PqtQUlLTHccsTDUJqVdu9ZUMnCVlo+6fQNz5lS7wvRbv5iqgkz";
+ sout << "DMyynFxFQvzk2L3sZUt1+xTw9r2d7urJ9VmGpj0arjR2+qb+2mfFqH0HaldqN+DGEiibZ7w9PmCT";
+ sout << "MNDZvjC0zm2N87yPuRBSbwn4JoAD979lNhFSpExOt7v2zucluinLIqwESRQjWnyun+xTZbu1MAka";
+ sout << "JAut97DUpQb5ALQ7TLqKOfk4vSSP07cVRJPSH6K3XnR+ZFX9W+7kb1mYRhJ60r3uKUYAoYJIdGqL";
+ sout << "jgbNfvqdTZqUOVq/Sfc2/B2T3kY0W6facFDev+/YnpwWe95pYSfUbewbM35nEZGJ0HVSRHnBTWIO";
+ sout << "n7C6Xeg9e29pfohDW3jy7vPL9HU7+GdvhZYMUfNeQTe0zYKuY0+/UtMIuMFzDJ1J9tBy/cLPuI4K";
+ sout << "oyPNxmTBGCcf33xcff6ZvAePZPBFgjmbV8izFio89if2qmyhGPDi6LH2NxYGpjC0f+aPj3j3H7Ua";
+ sout << "yX5PEPGDl+3l5jZjuY+sLwwqgrUV0skzdcjAyEbLPClOkj2BG5dELl4VcD8ESsOwyk7Yyb2mt44n";
+ sout << "GKgKNGm2+EwSNyvECcoEksksg7gaE6ZNXazytt+kRITYczq/v57+U7/tSjyTRL5qPLxWX5OwESUw";
+ sout << "y2zx0ulSrfH44+Xxr7ZnI82X5IgDehZJQvNPBmtPTB6JvDuUhJMd+hQF1lboLwEHAfZKpcN4v7FB";
+ sout << "GEZi7Sp/iWCZQwtALzUDY4YKGUuS7uOyjpHcQp+hxIlbhXY9byIyhvvVy361/nbVwOnEHo5BaKYE";
+ sout << "csaN1xi8WvBN108lpddsUUDRgBW7oKXoiDI06pfubDTDZHSJDABQlnor5sTsIQBMs35yYGuq0lMN";
+ sout << "lDJ5h6Nb8r6h2HhenA8tSBmMXoq3j4IAq0jUDpeR9TXX4pBbGfN1HgWpbIrAKmSh9L6Pxa/tB97o";
+ sout << "D5seIFPmORWWemSfAMoAs28YqCise3933/HPnCk83PcWH/4S7+KITJx0tIgF8ssoS36XP4J9L/1Y";
+ sout << "ym65j7ffiEEDH2rDgip/UQ7utwOHAIW6rOjkComtHS0F+m+yOcdoIwecU9J4rPGLgpq7KW7oSdD8";
+ sout << "5/ckw1VAwgUvvo4YT9forakPHIPB9BgroHxUjvbvABXJEkYGw7xUwn51NSj+7LieWTsE+IVs2kWx";
+ sout << "TAU1B6hsUPr3f4Z2g74JI1AVGc9KSuJhTtognYLM0amQd7HkR9Y4gmTRYYrSbE1yCWj/gYd5Sn+W";
+ sout << "/NUvdGmfqjcmItvBAkDr7lf79aevcKySCPfP5ZzDBfM6aJw/T6EC3KwBpY4obv/Zgx9dLKZhA9Uh";
+ sout << "jCKQEEpTnfOOGI7D92zvtySthJNjrGzN8ZVdJHyzXMSYWgHEElfM3bB3LdAe54vVG/XyHag1EMMH";
+ sout << "DnH9JOUvMeXHOLRDnkI0RNlGg21wNjl3HTxSiXkIwpANPsBpcoow32KWYqrygnB9iF30IdwfVTbz";
+ sout << "OhTNM/4qyrwjdzxSTX4IeMQrviMB+gi12mTcB4G1ggqXuz1q6uFqfxrlmMx+gDAuoEbR0vFF/bXg";
+ sout << "M+8PXQ/oKyGYtptl5gM50TsI2CxNaBAU7SUTG2zH9pDkoko6VO3mXfRblwFH4vjJ3XETsr2uAJlS";
+ sout << "7wOiJOWfMj9dKFMH6efJkuZPegH2WtkRjomYXO3l/UVkWwW2KuLJgJhAgqJcI3ODJd/kVYoR+THn";
+ sout << "IiPnJvoBXfLTKJ6r2lbjeImNg/CwzRVhDVVPJi401mloyrMU6JQ4DtwoqeiS6qAcLDlJMcu2A0bG";
+ sout << "+F1isgRc72oPo86rpVxR7oJDEVRSsQqkOhv8O/lVaziMsLCBuqXUGfuohPNE/+mCdSrZZ5LzkKSe";
+ sout << "iTlYATHr66c+jnkOETWaEPjUpwQ6ABfit3mbttnnONmDSnenmUnHUf20QyUonJGpFMsWB+DSPFs6";
+ sout << "7zI9eOAhqKQh///VoPYY37AdsdacucmhBJY6lmIHHiDyT804IvuNqWLSt5/Cko8t8thgShjeSM8J";
+ sout << "9med4U5W4XJ1fEoialm7jil6e/fr23OJJf3VJp8JEaibvAk+rbAbc5VIzwaUG1duo1O4783HLJu5";
+ sout << "4we8QCONtekxRwXi5R2gUi//qD1kJKbKxnkYOXaKNWkUbEXPOSy+evfT6jfbYcdk7VvmfA0qioZc";
+ sout << "B9nVWevTpoC/1sE/aSX7dqAWjOd5WH+KsReDpJAtB+uMhu4iyzaHV/gPuCAUdm16nVmHgcBP65Ix";
+ sout << "Rz93awPYE4aI2yoWBvJnIN/GgUJPBW5rHFcTncTV1LSUMePStawPL7BZaY/V/HRBUOnQ3V+p3xwE";
+ sout << "QqFY44ilI49X0t0OR04upM1hjEnx1lVyd/2bSw03lCDr+y6oNwi/hrk389KdFRtlxT0/rItCg+gX";
+ sout << "JVV3Q5LyY4WEE5on7coii0m/ZyXMxNT/RitPnLWc2aPtEKhbVOWpVuQGw00eYcKTs+AN1SuqcsrA";
+ sout << "7mVcPVIYhQ9/rDYzAzdG3HTcuiFrDWkGIOQp++BZEYitA7zEexC0xZPQZsqcKoH8RieRidrtPNXS";
+ sout << "ihFNGNNuxYQWVchClJMvEBrl1ankneT/fJOLTob7xAG+o/n1zdtSeTUtXPN4O4ym3GiubaONLzLL";
+ sout << "Z1TzkBa+H/t9NkI+Vp3kVRswJr3cEu6K607OPm6yGAxw/BwpQRBli6uf6SMAdNcAPMwZr9xV7Est";
+ sout << "pLz8ibkfNdfMj6fMY9WKJ5CJhajqg0WPFTlnSnaRs5ERtBLK9r8Ip/XS2VUT9/rqeFivpq8OsInl";
+ sout << "yV5iKygaW5OyZOtBbI4SrhN30LZZaoP4D4fjXqc5/EzHyzGCYCfgjKrytefR2F6CqUUdBOn0nVtH";
+ sout << "Z4xjlb6IBw80vupy3KEjpjsl8eAiYM9JsV9aw4Fd2hjCdeg6yPCNN56pm59Yamga80+31oINYjri";
+ sout << "3OcSjN6tVNwdf1Jr0s6Y1+0VgrXT+AbHuMkdPQbhTgQr/AAqHzUr+5FhrIZ7xM2vF3PrqUDakSkQ";
+ sout << "P8xIrxYawDr6fXVDWeVPOlVhUSihPBMHjc187YnXDd8Hun9Lww0wUuzOPc9P3Wb8wBTFY2HiXNL1";
+ sout << "ciWhFec1G2O1lNNBgYSeclowdwMNrC5z9lk0jhLKLrX2Ji+B5ypECjWGE7ZMNSuETIucCTh4wl/Z";
+ sout << "fLIB5I8Lx2D0asU17GjJQk1UdQa9uWdNgpG07osHpTH5FoWxZcQSBl7cvfkqXltox1ItArv9yuKo";
+ sout << "3gDp6AgZTFOqYhSdagGzYHdzB6KkEpIUJJlvMZsRzlSNIUtHJ4muh5SbP/X0AAGWnNjNZj95Yf4L";
+ sout << "IS5+ZQRnfrzIl7Nvb4KkxbQicPMrtXCcZkWJ0zN+xlNOX4Ph69XZEpmkzj5OBi7H59Kcw6ZB8yEc";
+ sout << "3SIw3oNS+6XAIMU1TvhPexpfDTyQNBbIgyycOPYaeA7eSgg6yz/4z1RfNMVZEj8PgPri6IzZc7h7";
+ sout << "AzIGqSzGJWAiWCtBFSmDQ3KbDXDMAaG6e8g+zzdm5dnujiAJ+s3PneWlapo5dIvjh4MaL3w6iy6w";
+ sout << "T62tjz17F9eEnJD8IM36+Wn13OSPk0iFfPKZfBDZPhEAGRYG7tzc/HKJ/d4m0hEg2GTY3M6pEjZj";
+ sout << "nQIcccSE/e76TSkeBNrZGp7lplsixpLjBdRFSFQ59D4juFAU/8tf+MmgtxWd2VVPU/mtkYU9QXzq";
+ sout << "JeDq/+MOHtQoMdFuxJvlEj0EE6Aa4E4Ya31LBEoSbp7ln5dDcP/R1LvqaHZr7+XU73GMzpgMec3D";
+ sout << "7UY6Rnip+AXpYOaWcfz6XX6y6lLA3kdIsNptHnc+f85kigFZ1RsCXZxugGLjxcFWXVieSKv6PwVG";
+ sout << "oQdmyR7KlT+tdjXvfGTP9AwdU3S4QGHi77l1FSZaebpelVkrMgWhcug3s1Ed0/1c55yvaZi/ymXU";
+ sout << "OYEtOmPmMAB5wcOagBZsTBP4/6w8Zrfy+27SKh0W1vD/rHQMP083Xsv7HCqWppSVZMWOGJqyUkUV";
+ sout << "rBnbjjEmLTyHXr0e5DP7TRCpx4MWMIFUI+fkRdrWSXxNJqPlST0J4BzbQl3XSppj3iURoccQBDpN";
+ sout << "VoLZA+61XgLAwY4a+1HGcvVRLzlEhHCmWEaLIeWMfIpb6L7U/LQyfG4nDiMqt5HtmMbLHhSf1Iqy";
+ sout << "swpB3hHI+UjL+bFOD5XkCRclzVPucjimVLtsH1QXWKYIcrC23dh5/tfoR4SUxzYn30LEbmcMNctf";
+ sout << "ETO3ebcGC9+hvFGH1CwVowGxDhQfxf8tjORR4Vv+L0Xfp7yr0Li/Q8wPnrIQQkbasy2O7PwodnVx";
+ sout << "W3iohD3htvNaL90vc5WOf7o0YUKcAWfM5ryT2OJoFFYCSJaI4qLPb/b9OLg2WUL0jV7lYLrK1mBE";
+ sout << "JJhgJRQr1OiI4TnIIWrUQsdvkrRVYiUWzVS6RsE9dLpw0ThoXtu3Bi1gWJInLknUCJ/yUPkNqwQS";
+ sout << "hKult4TehcOZBfHVc+BOtTdcLNEzQVWy+HPWssvhSNIYtoWp839hdGzzLoFxsIGik40aHU78d+cq";
+ sout << "ksDCHIFvnbgBvPkpLxrmvXrKofATp+ywoYFeV0g808/Pl4kX+27zXT6ggZWAO/I9anenVXNcgtvx";
+ sout << "ICdGbYeHkfXRr1/IVvTgtN2kaS4sSRbf9LPij72aJoftCIa5EknOgNSOQuqpEDidYdaXZXl+6tg9";
+ sout << "lz1qKU3i0ivIjIcaGmxLEi1pBM1LwgEYWcovusXNcv5+pm6SXUgVzQkHu0Iz5MJEdrgsSsc4NN+2";
+ sout << "swZHdmviqcDDIk7fuOSwmj4IdjAWUq5lmgYZbpZLZ0pPsmTjqX5uaBFXqmlpKVj/vEIKiFOCGtZu";
+ sout << "uEek9ZEpH8aTYqjf+tGKsNNANsDNOFVwLsdD5edStQS0c3U9f2Q1KGKXw16BM7pArxVx6KxFjI4D";
+ sout << "LQxcYx18Gm6V4sCn2J0ahj7IO389LWJQJBcfJNyNSFhfaRbVha6itGi8UaBr7Q5LvkCvV01WUcJu";
+ sout << "AsuyKjRBScPvjzypYoCxSZp3ln/sXB58RGCVZ6c7UXeZnGs2ABzXEIRYIJyrsNNVky0aGKSHFRem";
+ sout << "r2gsZ/RYPQBVw+xt8kGwAkM2km4waF7nHbkN5SYq3VIedvw1gU3UIkbpno8zJeJcwrnoVT9n686i";
+ sout << "aE/9ltlEfn/OW7XUGFK4jXB9GBJ455E/9iUejULkvx7iqfRsDnhbI7UsVDn7Q4snN82f65MGUtU0";
+ sout << "w9UaxqWKQUZqvP6rX/4u+2IfBKWAksqUG/Rl3O4krkxQRuuOS2KA+u512w/JhgdR/9O0BNG1YuBd";
+ sout << "C14QpgMmqPdGEfNrXUZN7uSWJSdBiwqwh+yPFVqoclcjencYDg3ZzKNrfUMun9eKKRBJ2UqPNmJ0";
+ sout << "zM06doKb85m39v5GBACWExd6vWsrP8JxcHeWtDkH3Bt6qhZ4YjU38qiTD1avmK12ti8n3lpzOpNB";
+ sout << "ObN1g9F9JLpGQs4RrVt5xH7Xv2LsAC3UxcKZMW8nr+QVc9BykqYIU96dVj3kffJlvfM1fTyAtTN0";
+ sout << "4016YMvWy9OzdbSeaW9c3ua91Eq0w9Ve7rR4D7rUm0DGwaPPNaCAQP41DDP23U2RkaV2yhcS2ntN";
+ sout << "95eArvlyyr0JKWkMochvrYl2iHN/4cv15vog9n/9pUP15ttJdZdqiE+qwBaGA+B78y7kCf4X8dAE";
+ sout << "ab3I0Gbc7FLCNEsJGcQlFTSmB+2ccRQtRh74pirUXd5BPNNQUEkZWXyD4tVcDCkWRqHbkxlneyqS";
+ sout << "ziQQlbRyiM3MmvSmsUOWYYlq6iKu3rzomWTRukwdFP/LbClcTaMW611t5rXM2Jrl2JXWer6HsRk4";
+ sout << "Z65qxcLwDmjokz84nvJ1zpNeDcCq19jnxNqEOaDIpDVQxtM6RY5L96Sn395ecZHJFpL/E1TEHSbk";
+ sout << "V9ZnTcZwiga4d4FaVDa/L26ckv1+93o7KzMuxgopuJqf9GJ2c9inY+Y0m+71i6MNharLdfuGkLwr";
+ sout << "/iEeyeu8K8QcwNyDDd8QfStXRGgIGhNR573Q6ARI6a9Uft8y0hUYrJSTcaryLZRnV0xYAGAq11p0";
+ sout << "YOLr8U2iOncUfz0+Cfc3cu7nytOEpr+jDM70ojkQxjU7DmdCCYqdgik4v093Rv9hTdPEBFtzNNqh";
+ sout << "hWlaCaul6E6Pe+PdjMStduOGI8+eNFpOJ7/K7IlXuQLMLcggaXELqeqzUTaXGnMigQeiNsXUhXJr";
+ sout << "7g9PJYDPXXLNYIR1TuLPXCc6L5KuF+fjWQ9CwUxT6F0xCBMVMUVHQSkNoqngCeaHsQbIjpkFBbxH";
+ sout << "BXjl24RA5TKttRF9mUgNUQK4VV9LU93FJFqPAegUWA8A5AbaRkJbylwWT26qCwRcNcMm+wVFjxdI";
+ sout << "BYvJqx7TCrvo5ytBIlgRVx1HLjUcyITmeQ9CCl1j/Tfb4RwslDCgCeW0LHocZde6lCdknTwlOre8";
+ sout << "FHdSxxvQAImwiZKBSxPYqsRLXEGMtcFxkpAjbUZjITfHP99qD+h18ywpMp1xQz1FAa6QBaDjFQUy";
+ sout << "q6DWqkCYI2cOpovwq+eU9y0HKT/CxiAclgINEMJ26zRPgJDBK2vYmft5gfB0CHj8zUzBuYCK8n+u";
+ sout << "6RiFrok3YKqfszQbsWj/M1nBKReS75d282S02qdrnm+OwlbRFmkUX5VNUsI48fffdLUzQAVOD42L";
+ sout << "O04+nlesMESB1w0GezPWmsG+eNiUghaLxLnptRrcATVWseeWuqcdcDG9ct3BkyPEjaNSZGbvqN0q";
+ sout << "8S0H1IjsnmKrctlK+1ELXGBaND79Uq4HC8NtNseb4gEQFBTj2rI85gXxRTqPwtYvB26mpBPrWmgz";
+ sout << "JfLIOawOFX/GEe3W3NelU+CoBpxvGv2wmgqW2quks6TmilBZaQX1ewR61jVhKaI5oG96e4uUksjW";
+ sout << "I/cXdAP2GFl20jLWia2m76GGbTimCyffGDV9v3uzu+lpoZ87JgudGUKn/jdJK1uqTad/YQj5t2N2";
+ sout << "Y0PnVN59l6Do19E0YwY5vx5vDXs6q0SiWoy/zuY2hqcZ7paY1iJhIaanMAJjFK+3FelY/IH0Xo33";
+ sout << "Uhv+4k5pPRuL3AU0nA+rg4JCT9YS3OwOpcwv4kNyQL1xRg2DnlcryXMHHCZPYeVApEruSVmO8nTE";
+ sout << "g9cbFMz5nmDHKWdF+KH1Zs9jMTI7tfaOGS5qwX+gBwcMGUPeHf8OMiJ6y0zXo26vHzLJ61wPWLrP";
+ sout << "juyapqVcV/YgQl9Ok54s8YGCv9ZB4rxnxjiABMls7ZZquK5kNJ2ShQWW5F9ibtec8EcjMuK4i20B";
+ sout << "tTgq8ymele0eOIWVE5AZZ3yD64qqbY7kuCLJG1LxvQRe2zr3FYVYqWhfyRvywRIRFcequso1Bc9b";
+ sout << "HKoPDpwJDaTrQn7uhEsb02WZKg7Q4XKZVRXxDGHHcBqwa4fnNM3IPScvskFpmyrZAvR1QrjNLTii";
+ sout << "NdjPVn0Klyr0sQlppC50bu1eCy/Wt14QKDUA8OFYOmA5mEjVGtFktwxy9wLssgpAD8LoyPQSxuzD";
+ sout << "cgiB2HqNtOwNGPvlE4/ZDfT/N/j8s3lk0q0cZmrAUCBXsBDiAHNbkm3WeDEBDY9+Un6fFF1U5chM";
+ sout << "tKKpyvjeZd4bjQXK6zzZNavXSJzOvqVb3OKlH6OTvP11rgv3pMdHYo9T28C0onwMHN53QGPsWbzO";
+ sout << "57SmomSDGYs+ERJRpGEVUXgj3D+Q4O4v/fR+XMAtiSOVmz1c3c2y7Ys9Pq9pFX3UF4q6DLIWfBmE";
+ sout << "6omLA6O/y/Y6p++EZleosnni/RH3hMH8TvZRYFW4EojNCm7Ss+eyuktlVXQhcPUOuxQ/lK1SUx4k";
+ sout << "vCBOk1YCMcl67xok/WdgM/lWJvovLTLqylpQlhsHIM3I8ccuOcJ++lhPABcaXmInXnPMEV9K20kk";
+ sout << "d08Q72uJPoU4rjT6SMbBbq8L9UA9Ba7U2cOdK5dr3rUZwQyFZBswroQ7A9cZnuCD4ugJ4l8AnUQo";
+ sout << "A7ghXFgGkmzItOKtHoFYz9HmxZa+3qX23EQk3jVdml/8fFh6VjpTK73RPKwrbZyclJs0pyN09eDb";
+ sout << "RjZ153ucSgBH0jflZbSIoQhPdmmi+xQBqWV+YjqVYzyYJMJypf5ZrLCb682KW0KF2dpleDJUoX+Q";
+ sout << "4pQNhHSZWXtdJTUcRdmM8Wl3AjY0QNsbjyqe8rYj/o7FJaX/Y8b85y1fGLF3qqPJDxZQgR+jKfTg";
+ sout << "0vlbJdh3EbV5L//jZX7EcOTDU+dvkXOSyx8zQeS+5xwVXWPmTmaTNIriV/6EvNJBPQ0vmYCjsUoU";
+ sout << "6hD4EOcBuuOXADFmEcgRZl9z46qgDwqRasacBLwpaICbLCnpc8Q7QrBhpbmeHsWmqYtK6SzfiQ4j";
+ sout << "e19bNsz4SP4zzFzdEpLl/J7PeNWURM4SUBwZcNOZbDbD8as2KcD78JSCs0sG9zWL12JPjZ9lJ7AY";
+ sout << "lN9vqJl+2N6H1VGiJR3eO8Zrb/lYX2LkSz87AzRggZxDXdv1DjnPzGj740McdjWa96DZeexzAKtj";
+ sout << "DVHwn2PFsYDKzvmwr71zUNYLxwcK2U3ayJf8nuuP5nPkRDwl3b8ttN9QHNo2JWabnVJKWid0Dmiu";
+ sout << "zRus2qOuzcXLkGbgE5DdmONYcg7qznU3ostY+QyHO4/UZbDpqOPG+uXuk3SVhp6yBEmO2yE3T/We";
+ sout << "WCWw3dfW4DlOTxb4m+nf29ST7WIBoNR8omWSyxyZZodAXRy6NfOnpkYrgFAorXCprqzNiRLSbi8e";
+ sout << "hZrbJNNoqEUTrw1R/hXMJHftJH8GotFVuFuXTBV0wcm9eM0UeN1cWvuT/0dc7ORsLdbhNW7X9Uke";
+ sout << "tYEHwobKiM0mfa1dCdTWFvee2XkrmYsNjHfMNoQRUN/w+1VHqnFy1Q/qc1MoU/C6L1rPUjth3gNC";
+ sout << "oNr7jNNWH/tXMAEGqsHPP3+Hw9pqk4XE/B3QSbeQYrZqcZojBhWcwLJQIbSyJmya3w+QYKqie3k4";
+ sout << "/LNyngQb4on/sr1vLNCNc4c8yJAxV/nMoNx5cVDBVg/HNAh+qwhQ9tVhi7xRUpIrAPICcaWaX+RH";
+ sout << "Wl9jEeq6PdI1bUaTdBvF7DgSvAriB22oHxG6Jy8X5WIycn4FHMF0/ZlCfwccg8HcjvZzDlvFHfbs";
+ sout << "lepXFAJ21XIOWHwDzG19VnizLorKXM/FmOnFwClhG/+yTREVbWCjaTkDsOlWL6JGOehVHckhxWRM";
+ sout << "03eFtiNGh2k/oqsqHkDxYtt0rMmGly3vLlLN82Eiijq2iNo19EF+euIVAl2h1iEmOGXQf6lkXCCZ";
+ sout << "yscrDfP3XEbe0grXP0+/ETyFrAAl3/zoENKR5MUYxVziTQ2cy733o/8aX8J/X/hFTm3ZFVKKmZRp";
+ sout << "vS7HzVzjj8i9zZNshaRzWt1jYnQxKtJ9w9BEn0VLGcb2spLOKyctfmsughG6DDA7wjjkmNDNPFMH";
+ sout << "mefa7RvWjXog4gPP6SiKITag5LwuBDotZn966sOaBOWK85QelE/NBsk4hCsf5LNPkMDFJYwB2ZUK";
+ sout << "g/+WbwWHIGFI2O/sNsM8W4xDyia/k6ZFpaki6uOZ48uR8uLOu9mMR7NSJ19gRoe/aHeOAi40FC8v";
+ sout << "K6Bs4rbxF82hbJmSfp64b0d8pNPevQ4X0UEbQI8d9o8RjmDqgVwV49InO/hobuZNWyY5sIz+8b+0";
+ sout << "swgWi4uhT8bsQvMswskKsWmV9bxrrQ4EJFOGtzCIeC1X0Kzm25gSf7biU85dr8/3dXDQcEqdL3x/";
+ sout << "BVrjeWsXd3ko8dQTJzD2kNLqi2yNmymIarjj5qzTxlnAZYbJFtFxtPPO4bfPDeRRQ+D5PhiZVZn6";
+ sout << "a9WUtuDFmwKmoZKZIaKQfQtwkMQq6F03sU/EsO5UuglsfN9gZmLVZrNR89YPC10gM3cSXmABMYcx";
+ sout << "OHnaivD81i4KmkX23r4rltxlqsgzdUKiGvEpPhfRwD1bKlKb+dTFgA6x5cYaOQ+2/KqeGn0JvRHB";
+ sout << "HWmQQG0aJvlLelva7sG2mahqaTpsRGunwr6EkeTDwSzY711r2cNcLBRq5VGIg9ODw/Pn2eMN1Sza";
+ sout << "0xBt8eEzdGYywXR7zpcalcJfOOKUdpm5D/Lr0Q2y9qGhqr0yambYW4ltxSreDuBWLTp9lxjbPmeg";
+ sout << "tpdUAqTqOEIfskeG6FVSzfTSzUN+q9BjZw4RE7aWCtqKm1M7hF7o9FRLMhqws47tamk3AZZTIC3x";
+ sout << "sGaCMG1/h/gu5bH/VQUZpzIj7KWgf7PnJEL/WhAsjjgx7XRcEW4OE02pwmhe5C0WQehHYOdTSTQ3";
+ sout << "Y6djpbeCYLGJNSW42W9N5Qvrp3GTYyqDlRcFgsZFDK0DmBonUAsSh6Ytr8pxPSNWAejairTkKoi+";
+ sout << "gon4K6TAdRrK8VQSSf+eWE9oTlcteftAn4iWQzY99aisuJP0MzOJr6gZgp6s2GlsaiAA0KObTlwm";
+ sout << "/SptTPhSn9K+d0Os8QMMXYHlhF6waJ4i2NrCiMulOp0vHYPOKmfyCI0+hQt9R3ArNvY1pqhwBHYo";
+ sout << "+PMNqnEJOH28aU5s9HyHPOkQOOSvMTYnUo0kOns0sa2dSxuaZOq9kb7aqV4qG6+ZXzFL9FbAhth+";
+ sout << "4FWqkdDxPUbYUh9pKNQucyvCOtJdlrQbrJgMOTHShadJl9g+eF5boVBDAMZI5WP08py+U2sw19IE";
+ sout << "/uju0H1VxjRPA4xips+lxnZdrgWQu0zG2nHeLObhdS/gbO3R25LWZxUILdNWpVbxuQrE5dRWlIaj";
+ sout << "aB5qbQO7zrwCwjB/ZjDc2dNEH/4lvPv8vnEehJBa/sIoieseVzcQgFRLK0n20sdaXil9vJ3B5qyA";
+ sout << "UUJBjOT9I8dtvWAP41Y53UOjkkuqWOoAbfd0sp1wxpmjOgzm3BhPoGmLhJRlJ58LkVYTF2Ix/Hwt";
+ sout << "ZvLSvOwNE8HdPFOU8BALTzAPtVpfQCKKDD0iOdAzDJC8NX77Ar/8UGHkgcJP+9LiycmuPnxOzhGa";
+ sout << "tsZH//NolNfZ3+4HA2wir+hHAjw0ia6d7OSIJyLV03kjAsL6I/CajovAsT8uAkegs8ydmvLryqGB";
+ sout << "+98uO61GMTuZNyrv6TIG0oRDpNiKywoYmXoqdum/Dj2UtsE0A+XAb9KK8Nl6RIM27j6vkFWjgC5p";
+ sout << "7Y9u+TIHFd18fxQ20A8RuTKooQaC12h11+nFUR2kAUqkJq0e9LWklG9jiq2kIsfHr6jORWi5b0a5";
+ sout << "Tdu2IDMtMCS/58L3+RggG0BAs+hc+uX0txQJUYEUCmJDlzReIAwTWj/5j6YRFpq534fp38muNl0k";
+ sout << "OV5OE0UU/K+TLtfXsUQ1QCbKOvEW7RcDICz+5sC2/FIGC0B+5KY8GUeQILlr33+LIxJTKAQwdp3y";
+ sout << "G5BZk4EXDjoLiis8OAO6TzgfktYXL/KwAicyHdRzSUAuzQ921QA8TeKMAPd+4UrzhUp3S/cgGxNF";
+ sout << "sF9DVFCaYR7t8Wg/ecyZ/Kn71D8ClkIigOWuWyN2XIuHgLVb81DF3gDdcoiMQ9qroTmrCbJCukft";
+ sout << "4zE53jcVbaVnhr7hK56cEbe3F1eFW6pEUFNLOxFrFOwyiwngw18cy/PUThn6kpenQoYS5f4FXPul";
+ sout << "qlFnVMTXc/j5mWhRSNXddfti53WsTB9eh6UI/MSWNE9z8kjdMUWn4z2perojF4VUavHqea9U+6ms";
+ sout << "samqyF9mL9CxEhDNL0+Uof3C+yPo4KSqMC3or2rqSe8IyrAbemjcfKGqMjsxBpSGH0YcRSNoHGw9";
+ sout << "wC6wJ37LuOncQ3VLJM9f8zgTpgln3y5NnKz9nMrgXE017K+nW+I/U8o0XdPAJMgXlY66pfkJsShw";
+ sout << "JmidbowoBQGAXWO35cYQ92Avtitrs8kvq0GIuUGdIsej9PogzuW1ZLTSNJkee/S9t39cXyQ9YpzC";
+ sout << "KSj+H4uhCrGCQJ6Wts6l7kEhHG24Eu/mwSEw0fTTNqL5maeimxJsOdmR/IEVC/pps38ZEUuTJXuq";
+ sout << "J2uTsoZmiVs3ImbIkjHnN7NF0I4ryYf11Qd4ULqa3eIjraoiDn1KOZIJK41CkKOYAYIts6aFS2iw";
+ sout << "+34SlRxtIsIrn29zkOG3L0yc0o+fRT5gXHXPKv3eVmNca7kPGaqSrBzv+c1Fz6ShHK6tBjiPVtwp";
+ sout << "S7k2/05j6t0tMny1O3QzQZDTdaAi0Xx5yg+1oyK3M8XrgLsEU5aN7XNRorjMSHv4XT+xhe7F2B7V";
+ sout << "//JUutejh1StxytV/AswArYDSz63Q/7Vs1Eel0/Fkc/Rz9gYPKDqYo0UAFi/baWoUrSYZjYlIvBb";
+ sout << "fQn7nAgDTWd8Ry8dx/j8kUSR/CBWx+xz+tNPRl0TjIOWZJtT/MOqLZyODDBisgCpV2en/f9VUr0o";
+ sout << "V68sh5TYTczlcSA/lngZLj3uNH6n5Vh7u1kjINhOiYzYgaUzbruQJycDF096vEGTZ8JWnz8PNzT+";
+ sout << "gkDfWt8JtyCsQg3UHfO58YSspPbapErvM6zc+nOzOde7dLkjj10pxRtQMGc+fES5ER9QLpNF8wIR";
+ sout << "zGF4ENLZ63jytxO/Y3klAguefc14abKtLaOi/SfWseX0HoNpW+cA80GVq+oYU7krKhh4n7mg7NHX";
+ sout << "PBsUV4gmzhj0DkVAgmaGd9qT9KlZPWHU4vf1pGE5P8mIoDIwWmpFsN2sz/mEcugGWAT2JhT6CP/J";
+ sout << "sPOCAB66Ln2SoLzSJvBQwmtzdBZm0a0NF6TOloCdC18cJZfqYFuEJm2gUozzFMPjLKonOR3Zhr2w";
+ sout << "CpBUdXL90wQKDIoTIjnOIorXzjFZv+O9O7y4OBDpjDsFrcdQG3AZPtHcQU49W42aH+6XcOUqlxRc";
+ sout << "DYsJdg75GZliymNIWgfGGuQxJ5kysL8iPjmCxVGNnXM5FvYW/JYzcXy4A2DXqO7eyVBw6KXKHxq0";
+ sout << "SEF7IIjjj0TgUp7tLKY7/L0aIVuvlNblobJNZDAHYheG6OG4dAAnwaggRO/dneeiQ6hDtFbenFmQ";
+ sout << "SWdiYK09ZQIpXbdlXUlPiJIG+mtHudHcbaaZwVlYS5iG3NIOx/IYdKzYnStqv+5NpkBEPySa0sYy";
+ sout << "npsOyO7Hz/09SMXbSOo+RBkLYrYXiLtH9qiSj+Lv2N2ueJw1fN2PeCd3I05jcLJwqCEsZ0ch7FfZ";
+ sout << "1MaXVqdKYBD3BxBs+SY3qtHgffqUrGXDIbFARc807a65++J1NLuoD5ZXI4empkMcD1O2j46tQqdt";
+ sout << "MtApg+jZVUSdVCJl9YArIMwOuUdgH0oPcOItEBlmYRmDDcWX0E/fuPfa6p7u4X6zwQNY5PdI8nG9";
+ sout << "+Qp+PhYXKt4fB1xEivvnHXy1x3xMmDwDH7qyZEmvpiNPxZSO5ZGUbDdzUH5AwTCxVRA2/Yd9B0GZ";
+ sout << "+WkLsX0Ds1K55I2cTSN5Tcb2IgT2VQnFb7ceok9bY3ABeIipxABdKzhLiDeYDfeI3is/pftpDxGC";
+ sout << "vkyX1ZUnB7cpC9lwlwwbRdXhQ8WidAEFZNmt77xdTVeyU4cVby6WCBelfRiKd0+wXtQsSp8s7iRY";
+ sout << "rNHSq8FQwmt5LPdhIsvLlD6254b5AV95lYKnriWhCJ3frsNaqWc001OhiyRT2FTtYIb45pxEtFw+";
+ sout << "fY3kLR/xs0V7KWpXJCyt7uI51vRNCIlI2PbfEe6epAMg7oLrbXOtjJ+z6f6C5DPlVUf5pxNNg7PT";
+ sout << "EEGuVTVEkWOD/hKQB6iJfhLqIbgfOnglwI+pR8JzGJSJg+yuK82g9rRvzhPvIQm8Y/xmsMoQTvg4";
+ sout << "zBpA2kgegUxYS+0VXG6ZEbO9CjSdmtUvcIpa1yw6emAmjR1TgiDqtTtQVWksXhRNiQ2XBal/YlBE";
+ sout << "+45tqfGRjSiInj056XBdqsQqcQVuXcxXMT90/uslRAEV7oTmVX/XwSahsuP1VIevdVI+rC4liNj8";
+ sout << "yWmDEdVfQOM0u0MgevyXmDj8BMQHznlHulIVNQRhEUmX13ibjVIDLBHDyozBbvPneuY7hBejxnV+";
+ sout << "0kaG1VGjxSaTpH+jOhXwK2UvlXPTN+FM4UPLS93zEzEiJ4EWDUR1wL9VUXEfx7IveXQSYixy/rMy";
+ sout << "zpSX+UpoqFyiiKaG8DGxg2hu4CPjNXfKVI7/IZAFIT6KHcxPrrO/3BZ2HTqwpPUpD7SCiVQkqzwd";
+ sout << "gA5zjgo5/vrSFtah1TLBrd3PiN2RBsGn4EzHBdR2IIrDQcE3uB3nEQRyWPhRcO2pkVry8RtcGh4b";
+ sout << "hkH0XKI26GAIC3cEuRzdwWU70JlrbuN4beXlL3z3xqxz6J88vpg599N6Offn7V0Ki/z0w+kIgHJN";
+ sout << "H2CSc9uBNeKfQMGNdQzutSckMpg/9neWQVhgnegiV9hJ+L2lQODdnUCJp6nBYPHqkgtbOAEFRL+K";
+ sout << "tkKQcyc7YkJs20WUsItmeW36T26+0T3uyN72dbmGqBaZIonm32MKPgh+sVTbUspUiLdPGSFz6zuF";
+ sout << "89/dwHkVzv/dndOHh5yb+itB1LaCjsDRmdHw9uBYto6eJDdPa+XYOBe2w2qLyOHph/3NaHrQI8ia";
+ sout << "gfvPuVAZ+V4XXQgXAaWjPF0XtyBmRuEm+C0J9nWznqrKjja+HXr+hnmlEuJrHGFIJeTcpNrrbcRi";
+ sout << "RBdh1oiBTCG1tipAQeDf4fqRy/zrvugJg5Rx6/4fA39XHXW+IjqLMuOpA1qCpOWf/4llleXazoM1";
+ sout << "J392TqpJVxxg43Jfxehz+XXl//IZPMCoiNeuztiSaWuGsw8ZjBAc8mMnzehBVPv28axiOixuiofH";
+ sout << "WF61WGuAUXWOZ5Jqsg+7dMuHhTZznTPwGVj6hzw0cyL4KEoj02zic8JaSnnprKzhM442UFIADacs";
+ sout << "m6PchnrjUqfG3D9DwbGIwwmVFXePtscyV6vb19r5nITmXaQHm2zJdkpVmwe8rB0Vusg+d3Crpxe/";
+ sout << "kOWVPy11Gv/rkYx/1BsKd7SQJfTdsZFABUTjLIaJb9pmry7FnI+pNxWNfIyEu4wHTyNSs8ZWPOlM";
+ sout << "0oXY2CqzRqdnzt0rOdEp/ncMhjN0p8q93nMhfwSlH3Fvse9KZ5Vrk4Np8HDTOEjjgCQO3vpDveL4";
+ sout << "S53oq7IfTsp6lKSyzR3h9iYqa+fnr2G5wDoIlLBPIRXOx9+l9OH64rrNITKFntHEkFFJi8rnkr54";
+ sout << "0892mQk/ia9ELfn7V7pxKnEgXnSqDwldLYAWSD6hhGmvUYEvEKbS+4/D/zsfnauYNFkc+cdtPuFx";
+ sout << "uo6oFl+6Th5VYBwrmk7xq7j0vu9xxY0NfYXkRcd2gp7tuxAtMn0gJVbfu+IfKW7cneAT1fszraXX";
+ sout << "+qVo8SX9+d7phb7a4srtpA6513DcZSBSObMJ9RT+HZeYDn+3l986vZGAOJoQMGIohkU79sFTdqfR";
+ sout << "i0lfN9X/JohokHdD9eroMq/F4vMWEDj4ax+OR1wvJ1T9syDfS7YT9IXXt1eXYL78aHwoCJ352IQf";
+ sout << "Rv3phfVYK45OdBn0BqAQiUhWdQxPcwuvt9O4cs1JdChTjSSGz7bKtOPTBxst3pj0butIF1ORrs/h";
+ sout << "n9iLG4GVA///xeK2m1OFKDuk9AQK8hX+Nm6lEu5zKZO6NBJjSzUX928LJ+Wx93h6F/I2sy6hmBNS";
+ sout << "0bhas2TNmGX2Eiu/EslEzPudjbp8uBKLCyHvM5P0kiOdzOqfDD8L0qyuOLNumauOEVB4EuSoROZg";
+ sout << "uGPoG2xyiVGdkKIXIfvb9YjLpU3mebY73zeInPcaQCVY0PcbvEYIs/wMXch9CU8FP4wyJRYsp5rP";
+ sout << "XELbGL9uDF4FnhmFDjVOTX5j/zV/jVNNWOv98Ue2JkVZ1o1gMweglPMk5Gnxn+twxl/OY785uZkB";
+ sout << "CJdsVSNbXanSF4dFQWhkEsmjXyA0PVsw7uD0tfIaHB/BekjR2hWcwbUmsAUzIm4EVvepSk3Ec3rc";
+ sout << "v0ZNE6FO3KD0kNueAt8RvhpxbO2AOL5pm3vCt3q7lbz86dalSf4mFuv23Di0Dyl8iZqNAHzsrToW";
+ sout << "50Wua4dgrUEhqSgn8ohZq5Y5tI7m3uiP2LTLAfVDag0TdA7IAREMvTT9F3N2f/2Ff3j3b9HW3MEI";
+ sout << "2PU2HZOvUzJyyOfuc0TnK2DXpyXwGWfgoeAQ1X8c4CZhEfecXs/FXM+BfUyJEBAxnAkHxSV2oKW3";
+ sout << "PPismlBkfTS/XPxnChKJu57uy88MTaL9cEfVexPwEiCWC8VOBAd2nQUYouKadJLVx2UliM/dKQu2";
+ sout << "oYzUBJKlphHkLMGgUUuhXmGFa0KFkpisUVwFtyX1ey5myWvdyjm3xGkJJW7+6KzKIuFZ8oF6QEBS";
+ sout << "thWf7/v+I+TMG2FI3cl96g0zhRnguGVWeQ1NCzPRsDmK0fDZcgnCGyQGtQkdVgRBysC5hLqgnq0W";
+ sout << "+2S+oRxhL7AY+DaVGOHfYnI+7jyf+NVcHhBfqw9qmVH/rZk6cuG23Lj3nS4IzdR96a70EWufMtHo";
+ sout << "Mu1zlfyTEcZ0HgZ8vZdaBzCDRT7nkqSou4/uomjQzFUBRkZYD+etcrNb70zi1Pa+AR9kYXVJIO04";
+ sout << "TTRLgikAT4Ja36SlxjBbWDS0swHyWy3ecHhdKwVvZcklA8yxzLvdBbiANv51v/o4O8vP2AAqAQV8";
+ sout << "AcOjIeo46sslQzYC0aYgaigzGmsx6nYGgQ2zFH8bpWR0KNnP394GdKHx0AuIjmC1/FKtN0KVVQeE";
+ sout << "xcU6gbb8Lbe9Mw/WZYDiGtSVXrNBlD59+0dNG/SAyDZj1LqU1Fp0thu+CzID2LVAJtA7z4Wlx8f3";
+ sout << "t3iMWYIq9+3fDuCta6hORK3L/FUG89sB3Rqbi6OaYW7nu60+tm+qRP7ECwoNcyVFtGLOOMDonoyM";
+ sout << "uHGfkqQcEeQaAfPeWl7sEbnzmBbdqZ+xUj63WTpGQ42Ceqaa+LeoUJT3zdt8tDK4Hc9j39rZh0zX";
+ sout << "HtwdVVVlozsSQO6HvvElDrBwO8EY/YPczI4dUGCc28KcHNgEQQ+M7UenP4mbqoTIbo4Q0i7FCvEu";
+ sout << "9/TrBv0JQ88ae2xds6Lk5xsQomtH4ITr4VYliRfDO9bD60zxQBhhiHo5joRQ7sW/t3ms1tDzsco0";
+ sout << "YG4xVvyLzUHdDh4FBNVXJ5ZHnHnTBnppmNZ3M3ucVQpk3mEXauKEdPt7AWwdX6rCqNR81/ZasFJh";
+ sout << "qTU34x7ZH4Lg3Ut+95CHI1qubhh5W9feqPOLbvUaJzuqxlcsTZBhY43N1PSCerMFFez/b1/Rtw0m";
+ sout << "OAmNLDlwUfsqtrs4q3OnKlfXyHlwuIDe3fClgcE4r+9i6QSjVzPDaPfRwE+eTFRKPm9uhlLQYAnP";
+ sout << "YvVqOtnUCbLQod0SIakYVb24cwnvuHfGwx1tikIizumhn07N/8LXxtVGth+prxi0Lu8b3hMI52e0";
+ sout << "LXdzbJfXUa44mXoyEL0ARMyhaombmUt6BX/HaBLx19n6j+munyoheRlpqIdIMWllIAaEMbxCoyXO";
+ sout << "ExRZf8NPFANOB0hvtXhN5U17hleB7U8ri/6Uf/j30M+jglbucV015gMXP3h+E8qg8aOy/RoxhkWi";
+ sout << "lJBmD8OIGytd0GcoSmpzYzpoOQ+DVNMvVlBJxzG2sQy87PtAiJW0u22n5LGUVEQLxAJrRDdEiKcO";
+ sout << "zp/wdKOlFD102SjWbLp1mQIrqkjVgGRs+M8kIzTSyUw3HQPOvucyewaf30EkILRHwNOpUq18WxRt";
+ sout << "dE3BXUtm6v0Xc1rn71+Kbka3pXmU3qoLLQaCLLFQXcDBnKGt0olk84XYOb0/cTlnxBJiips2Q0z1";
+ sout << "CD8+ny5LOekJtzcze0PcYimx7Rl1EilhFrq9fF2N1cdyF2skG9UfoPVor/6U4dDGzN1qTzN/uflE";
+ sout << "harEkP1c3/Dk4BqYx39Vym1nvTSs9dYVYli+6K9EPaxjUu6ng+3ad48uZWE0xA5i8DfEYU+VBYSg";
+ sout << "ssaHO3OrblCccri8HgN1U17xnDR5C/zBxpcgoQ5BZfs6GYsjsDmIHz4Kq7dCvZ5xXStqZ9FEPnNf";
+ sout << "oeAMmWp6H15Ci6pqj12u9lZRG4rj1kQvaBvnpKgWOiscli6Q6mCTEsgtIYvmQhds27mbO6Y+uwDd";
+ sout << "ZP01VsjBxmPZaLxpUabmGIZiwAsAZKLN9qQgRt7EdMIDsyLehadWOzcjiYTHrWTrQQ8R7zN3e/eT";
+ sout << "NYJ1wsWhxMslZ+Q96vKEmejIyWAvcVmxkCuiifp6VfXCF8BEYRsf0lY+Y1VFe7yW1BaOfy+8Q4z6";
+ sout << "jtBh12vD54vp7dOo9xGlFYZD/w3KQKdKyVnnqm/tbT/pCrc/X6vbrgsf7sIbV7nMUJ12FRKYYX8J";
+ sout << "p0C9T9nkvrPZkxNhUKIMkra9+NT4tMsBPw4MyQQsMmyUjLjZMTSRiIyupPjo8U01tXEXElUdaNxj";
+ sout << "feSkHmcQS1PBXV8oQfQnwEIXLGUX84fe6q6i46YrhrEhjf0crtCdpYrDOuhJPAKnu46i0sHS+xRG";
+ sout << "B6/685zGr8K+1ET/1RqwR7zo8t0vJf4zqhAY8OpWgvY5ASN2f0gK2dzBMfuqrai05tWaJaUhYRqL";
+ sout << "7FufcaIyxbmDc1laARn/mvN6pYA8C0VxexvsQqF0OlOhpLOsY31GPy722kMJeKxFLuFYYp9DeHQS";
+ sout << "Br1KtYMU2S/RnQ/sIbzZ15aSbYstM0K8J+9hxHCi4aq5K7KGzzNCSsuMTAz/LEDEV5UKCFd7se7E";
+ sout << "rs4gwaKuHqH5b9n2cAb1wMpM+VMttHtz5oQplOyrZaz1r5sUV2KGkuU4oWHiHKSgu/tvrBvU9LqT";
+ sout << "1TB0RV405rPe3HjtIAzSPD8/VUHZy8pNBjGi/vqZR/ybRNP8GQFWZZ+vt6l7YMX83DqWbTbfk4oR";
+ sout << "C06g1YnZcYt9UsV0GldCGJQZ5oJ/UHLM4bfRzSaKiRAWTwOuDv5hHL+AKJcYOvDub5pCp+EsZh7h";
+ sout << "kMS9v85LQNmI5vUHbhrS8XE/2Zuki86Oms0AtR3aVWWGWM28oQopU35Ayu0rQE4oexb5NDjsKcP5";
+ sout << "A38n8gS7O4cbHdQ3XAIQaPoUx6TVK4xL5Pc5bUJ5aRsyivvPGd6meP4AGW8YBKObMMTL6TSRBD3R";
+ sout << "e0B/yqLXd30yRNiazNfw/aajOIAeHfbNuFvNLeqe7IKdpRrDyLg29Fh5n3Pg4Y5+vdaJigi2Iy39";
+ sout << "FKvhs6zp5SE0tJubMJuFDUfgYqmW44XVNv8z+ZUIJCYQScuSkCfMmexS3LczxvOTqTm4yShuNr5A";
+ sout << "+o4wXi9n6jEa0pcjTVlU45mbOXiX5ELwfbSpYDorlP2BJTZqMAvnfMdxdX7cs123ysQUHxMqKK/Y";
+ sout << "fSUaJORhU5zWNpmiSFeTCsH0uVuWMj11H2ttk1+gotSz7I/RCvotGNzuS7SqqOZzb3h6LpjHtplh";
+ sout << "04W+r6/FtyAq5z9gOphIZQUhoyO2tvbY7aCciCBBX1lugJWKgCQ7Ui60G80225lAU4b9QKOGzwN6";
+ sout << "hqKtxP6c9Dh+rxiERh4NoE6rctk+CfBoCAxDOEFVM/bcWlEbed+KvM49mqrDF9epNJP81oIZZb4i";
+ sout << "afVphDQaM8LuV9LNhPSMGLOoPeYNPAxyCHnlNyAsOrZK7otQ4lFc4dLM66NFJBbSA5iBTYaW6MqM";
+ sout << "qGCTCWxu5MzZbgNgwMv7QrTgPy4LBH1qwST759B3Um80nBimBa+8HbddXPbsqC/KzjW2yjodKmqx";
+ sout << "2t6vnINn3v5Q9dFFKv4oaetAj2dqSfbfCAvcMvS/Tl0Ont6PZt8iFpH2NmwcEbrwSY/2cKKqZIh0";
+ sout << "sVj8a5fxwXHAkMcov5J692QQf7P+OUzmkI/P/+TlBoWYW5Zor+79wh05NnrDHqdVMY24aSRKrdIE";
+ sout << "qOeME0ZEnPkMyOHJRUzHoPPw5+r7Oowvbske31mERKc3Es0cCjhJ3bab+OSaoXt2xfzgLd1e2eXm";
+ sout << "DtOlfyyLT4vK55WTN+IP8xpw+e+D5N4bdGDn4UC13jvhrviBYxasYYU/EtFWpFGonD17FPZ22PAX";
+ sout << "pGNpKbM9Eet+Cabzi3hJjAXwKYCmTXHk8+aHceC6DtWExR06Dj1QKoxSJg8vcbuik11BP16e8fEB";
+ sout << "UiPiV0hzsLwtHdbg7uZQgvd/e8qoyivF/NqZ9e6A0s4r0NoRO+6s8OPAL1QTpavvLtDhF37g+7HH";
+ sout << "vjGkDbTbzU2TOdzl0OvxmSezNfASeloEOSXqIkjMStawtwC+DgLRXzMhACjjVuc+MY0D+/zGt7lp";
+ sout << "hQlKeLuLWLDEUYNn3Dvb7G8/6beGyyjKHlHSOr9WK90JsS6OQT+xkEZnOibrIzgZBwmws4SPrhuf";
+ sout << "wwwQCiHH0xjOXF7iT/vi5qEKbJePKxh4m6kbcxxaPmP56M7TfmPSYJ62RTfZbnq/FIPurztCgSHG";
+ sout << "jmTJvBi4m1qzKgiwHVBoWjHK3wnjQxTuerxN6CMuTA8mv+ayvrabdbRwpVRJgjSXbb/Vzgj2e0PX";
+ sout << "IeHUocfZtrz9J0D9f5D5nhIlqv/aSodiEn91GhsasFWwDWwSdvKwAnTtCaC8EF/+rLx/qzCneheg";
+ sout << "4gGRJ/VgaQ4ib9Dx0+0SnOEWoE0GJAceSDJImKJ7yi3Oo7Dxy3J/tsJFST+JKNuakqFkNCnTetmm";
+ sout << "FkVNEXbo8tKzbeuXlextGxSr3ZCyOYfxXsfNQUJawLDxpL6fVpaDbf6Ot7ip2nejQHL5wt1AqQIA";
+ sout << "GNwV3Gszf6q/bx3vXKCTrkL+AFc1VmJR4ZShMwVlVz5pdzMWJWmal5ivqLFkRQ4q4HXA1bPVhS8O";
+ sout << "tfnYwzNTBMRaNMpsKGTL7MOOHdmXUHDPKhoMki5bX7Oy66C0x35qEGyTozFQgygLOMFe6eOleXF0";
+ sout << "D40Yu6VhauEQko7l/VN0rICXaTJSaemFIgmPLT7lMfrd3Ta6XNKYxXB449SOC63A4doQBIFJnekm";
+ sout << "VO2t09QLePM0/ztdbA3toWuK8cTSNZgelZRbSlpgE0+d5rqxjlxmKom9I/8kj5bjS8FrQAbbsnR3";
+ sout << "pLEArb7kuWg3W3Gc0pdlONrgWVvoYpTtwQsrY8QfZmIVmQ+Dkcz4q+LqfYRU+TEIM4Y06Gm67JzA";
+ sout << "ncMkUYcVm5YS2Adqe/VFsctKTliHEPhPRFpo9Sfn7csGTexvc+IYB1mFIWy2DjRSsfIorQJ25wAy";
+ sout << "FX+E8PlVJPf7S9IKNM7NmNX4k0aqJYJt9VVuDidECJrYiTW9vsJ/SDM39qR2bQ01w42aG7/ksF6B";
+ sout << "tWPS6j9Gr6KDOtdAK9UOof2Oq6uI1ENoajWJuDMxe9fT3J9/1ruJMMDE4dn8eSXCi09ip7qbnrxv";
+ sout << "5splSpCC7MvfBr1bpzbQ6gsZF7tPXz9R6FfM1idioIEJITqqaaYdABv+qwad9Pwv84maHHslw8O0";
+ sout << "/SPCYyCpfOAbbYuVpw+cEryeS7RD7LNom1M2Qo6A5Orh6Qmd+xTExUOJCxkPNvyxUDezWtVCPxHk";
+ sout << "lu2vn9lqjj3UaUjALvD9xB0WsGhdJbOUtlQw3e/gEloGvQ8rLamwCBAelQqTTwM10iHkvx/AzA4p";
+ sout << "TZSpfZOyXY+3HEjROeej+gvP6VXL1lWcYSrEexizEzy2eN/H7kVTEgnHOzYwp2A8x6vlKHWL1gnU";
+ sout << "lGV1R9H2bzmhj/iGFom/TMYWmiv6mtUe3PTRTIo3W2yjlLKnBiBFhWMh8TjfcLz63dadzCLEV3hL";
+ sout << "D9DPn8fnJYh2HZyd/N2/lVDtEDnxqyd8C5lZnxY28bew7dB/n5euYTo1H6zWvpIHlbbbmXrHd8VQ";
+ sout << "iaJew6T+G+bBxuUpUQlu1NscX6mcHZBoT+/g8Ng1Q8bmjiT7/c+Pl7wpRlF/0jwhWxPIL/5mlmPf";
+ sout << "+pHBfYj5BWXTDrZNchJKF5Pvwdnl0GbmJsPPiQQ+Wvt+y3lt0wv1gwefzFdGhi77EJyRY841JGSh";
+ sout << "LU/TVYCUFn1MdrtVLhGdomxo7Bl6Zj4bZaYlKXf3Qru1bRjD0Ug2tIePfjX6TPymfgxBkgGQ0zVx";
+ sout << "dJuLeeRNmhIFVH8T5jJTMcmd/lS9u0hzXrEjvpSFKPmH+Fmv6LJF/D0bl/V0rYEI2gKmpAWJLd1M";
+ sout << "iI6/XFF/bt6tQTRu+g4WgPWk8HG3VffsHUSAI7jDK8hjrwrJm5AHIa1a/PRxiSCH5Hi2EHCPRfkB";
+ sout << "grfcTEwLwHJA+j0zK0tV668Bal1NBw5bJGMzQSONvQqB/vnVqcRDxhCyztjW4oAnEeeBKPlAmAsy";
+ sout << "MlrkLbkVwjiaYbpzkYCsO/KlIAgoahAJEOLghLQGBuhP0voDuN99bKp5JZFaGlaaycEuYFHyk9f7";
+ sout << "zo6Xfa+hhmjfgWJ8KCFKKvr50ndwdqRTafQVxjZRe7QkwYB5ZIpwYehKsXlr5SfTLp+ANmZqP832";
+ sout << "yXRF39OZLGs7xtsMmPdI6WX/xsOKiCc0oLHdzH7y7tYzLVQVSFdRZtN7HA2r2Aun8gTbLExrbfRW";
+ sout << "+16+go4YQEubnMwW3FeRQqbpM+0GCsiuTlBPTgnvNEWi1n3JHGHeJLs/0HRNo6ba+jVE3vIZO6B8";
+ sout << "NN1s+3CSDGA0uqpGg/51P21F/Q7wHDgQDxRuTP1KQGNm4u8A7Kbdw5rGZUb0ZREWwGPfUpMU0qXi";
+ sout << "TgY2XA4rESFQRvn3Uo8jPP7UOZgKM5bsdToC7Rp3wuKXJwBlmdTZgdly6CmELOILvHVNtGHg4yxO";
+ sout << "XA9Co0RF7RsHOg1b9XzfeqibiK27ZiQnZ6sIvl+EdmsnxOY+Qa8oUZdrQ0JnFUiZjC0+P0760Si4";
+ sout << "i36AOc4h+GzZ6WbG0yPZlFAFEcPiBbyOCp6VoY76H32BskEuSBJx0U5+iFKvkvK/6lyvr9WDTSX5";
+ sout << "n74qoW6/CduCm96RcKbi2ywZ0Sk8vi6V+tWYUnrDaepg/iY84UA0qpAw/QsNZN99BqOlNEWftRVP";
+ sout << "SZhPmpxzE2xg/SLUwR5y7NRijA8QalWWQf4G9TotViLV1IkftS1d9TvTMb38vGW4o55pWw5SZlzW";
+ sout << "kFf9fbqlQ5ktTZVol/v/n2pwBjDeKu9wbNFlI7bhl2xhg/3nN2e3mW0HikB6K81JywLpxL+sa8+l";
+ sout << "SW6bdGwbgj3BTAt9evqejNkQHVb8ALTkmZJUcPP3CmW0DWrKioqolNXrfVRjl9DLcxgQfhG7osjc";
+ sout << "zE7/8gU0JYIk9Tr/XDHdk9zzOsMZAHu6nUbKpIL/5mytR193QuNE3x/eCQpsG1dMC/alF8EGn77V";
+ sout << "QK8dhMFI3BQCXlo5TmLXd4RWOUmsNGt061t6ZQ/jvP7oPRskGSB6YUtr9Z6CN92wbnBUIf5VFJPD";
+ sout << "qf0mPU0vKaMsbwQmD8+rNG/bYItW8lRAZLa4gJR5r6lULMKvcMiHeP9742lYL97T5w5VTo1c0Ec/";
+ sout << "YTUMJKSvhfGPq4IfdtYmMOX9pmqd0VKlwnQ5w4pKiGRY3lj8zpXAHjraMA+46wTo9ZJ7Foq1XR1z";
+ sout << "eCY3xmnahD5uHBR3Va66rw3W0WGRgbuCk63WTKDULa0dWuvNw0RWy+MkZzbn76QLX9dYGrWYUZWM";
+ sout << "9mnQiC9yDObAz9vRcITf5PhEKduGFQgAl9CU0k/ZgmSmAYZ/wFJTb3U/roNqnptdm/KGuegjdQqT";
+ sout << "5utMQZwwcgV8DCuE3/Y9DZcnEMoeSjZF5ugB4Hw30EJt5xAvCUpVRA1tygRwwWD5XiKt9srDjBrM";
+ sout << "nmkQCbblTb/HTPqOvdkBO+4yHSGtpXfA3G0DdSFtFPO7OTwPLrUxf6rM6CQTgpzP6/eoupLM/I7z";
+ sout << "tlfmNGcq+JYCy8csjKaNlPTJextxMVqNScbpeYwG/9SpjmhqObv+oYs/uga5UBWSyS5ls+eF/Tph";
+ sout << "QSD9FkMCTY15Twa1LqEJy2TMZaDkE/UVJJB0Xe9eRQRXEgkM/7Olj2qsyN6VaLpzNBhcXT+Q7FDE";
+ sout << "NyZ3C6b4MeoE7zRM0/KEXc25ma/uiHu142x4Ar2icGm7NYTp/gxVRUrSpqOOK2EYoHzCTFilsryB";
+ sout << "JhzQqEIsDpNqmqyjy0BR5+rrfBJFxXAaEDryGMntT9XoLOF41ZMUAsypcv6yujSkbCemiaCTJ3gc";
+ sout << "9p0IkAplKKgb0m1mJ3SrW2WR56ZGhZuVIQ0p24yHi1ECqcEK4GJq1jMpHpqIGfxVLUD8yZ6wFLDl";
+ sout << "NP890I+aZC1kFSBSuj0Jee1VbQzTykN9oj6+8Bo8v9Yv/qt67WH1h/oIP5bxmCrjTzbncAT0zs5w";
+ sout << "qzi+jN+AcQS4qh+IiuCke78wgaLtnMJIb1yA0GkDM6t9crv7vKAJwGkEFZSSg+p9TucFv2RqCCjQ";
+ sout << "D5v0X3g0OpBql7+5IqXmuTiRCJE6BIBeeXVOtD0R6JORJ0e5Hy7SvT1LrIxISqZwat7//kb168WS";
+ sout << "cdpRlCa5c0+ds81SVFkjTKViZAnFkOHPGVgzxrsdlfTt1T/2fx9MZO8HT3p5V9eM9XTASzXM/jMN";
+ sout << "5X63GR7qz/5hpM12SMJvgcznXESiudVq7d7HYd0NeId04fzMlFaGtgGLDRr+15G0eWPCk7mcF0np";
+ sout << "7XLz9ytRsWC/WXaVx2/ucmztaGzksA02If8fHmKuIHbfJ4YkjjKjVRrthMCOLsGYzfb9Hat6WbXv";
+ sout << "jkdPQZd/EYGM/ZUd0wfuWo8W7yLidl+zMgMcJ+LeJjvRdBkHU3o70srnlKBaneUv7Ly/r2xPcsj6";
+ sout << "wLMH9iuRZ7jZJ45EM3htNEHJzrT/9PxaGKiK3caGIilUnFPpyb+HgousVcBbn4XSdkIjy32UPdmQ";
+ sout << "aG5w8GLvMEJ+SDAseg5/B2poqEiGstsA7EbJyJn3ptHk6I0lUFnpCgHI1H/vp7f5vhSIVkDyP4SD";
+ sout << "GPPTMLfV3mmMzS/IeIdOLdshYQCrL9I/C1sgaASFLuh0fZoWnNmSxfS1dVA7fU597PnuEzGn4TlM";
+ sout << "CVb8UdlIBfEmrFXegOnBEKu8RKspYa2TdawhfTZs1lyPJOxOewWH054k09q5m+p/QLHfmK4kr6wF";
+ sout << "eShsc8VFczeBC2MtoBVF+a2gGsz+F2Q8WhtG35kJuTqZK+lqB8clFAVSljny5eVo3WJ83QHoZMO0";
+ sout << "L8XspRqErV0+T/n7Uf9p72CQbw059rmyc2X1I1Mka7+cdqewt3RUZbnB7YD17+lg+do05keNDDea";
+ sout << "kkjLqmK+oKKOJUyDULGM2syzbhMrJn3WY1v8RuEVsLIUrCxgvMzD/Hwcl+lC3idwjonUrQuWWDyK";
+ sout << "dTjkimDohgLx3uoRs2ke/SCb7ERVtB0DMpLF3zytvY3D4ZGIdet4BGQ9H2nXNWMK9ahTfo4GFMxB";
+ sout << "EBdLiQLAqBJteGuoJ1CWdYGj6BDbYoy9sXSdnY4Hw0a8ydeBmFqoRm5wZy6ozEpxX0IeEc3W0GfV";
+ sout << "+4EojUBei+nfqmpeaDTWFgITbhplcgSU4snYS6dHxO4U66xa6N58YokpK0oSHkL42y9+Ap9qxiTk";
+ sout << "eDRtTd1wgP4m4EZS6ld1rTROZMhwgwZoyKizgzW3cc6o3rS7wNBvrdRMtOPVo0YJD0UZJtwCyDzT";
+ sout << "/LMXSVs5EQByRsZkKxNFMEky5F+D1IKlVny8ylC3oOI0VNJVxr+hIcuYjOf3adwKeslTQ2AO//HN";
+ sout << "4WEyXck1inZFxnDybu7NGa/WhNdet+PzgzEI43go+Nn/9AYLjR3m7nAcBOGgtMqmrwOTPJl0Dhnh";
+ sout << "QWv3JWK+OXqz3RFJ2q0lZP7A7EhU+3G5OHdHl6yFOh+Mb9TNh1ofYXPXL/mnnUhxCvUNYEXVdHGD";
+ sout << "RALydLzf6oY5id6VdYgCOcQY6c32WcJUEk2zCeMFNxyZ1ntzmn2KlawbF6MPWHqemMEvJM7YCFXl";
+ sout << "ObWzVlcxcnecWLltZB5xeRqDBcWetCMJvG5crQJk9V2jV3iXi56gFzOta7HzrEkKsjuYtys2c2aN";
+ sout << "EMcOMedGcMH4q3xkzca56eoR5b0mXeZRnFRnK82LczMxJjitaFLOMTA84YAgWWpWt7ENpJ1qA7bu";
+ sout << "QFdAdnQC+DKTJuLykS9eGpMZIuOarnlRH00AbK0j98XyjkaOhmL4ygSKNJQghgmFg2vyOfji/XNR";
+ sout << "F2M6c1HdfQK3Im0qLTXaMZlEjkWPIZTcDtVt+CdUtiEZaqPQwW1H37Eqk9gZAs1TnLe5AOHAMWh3";
+ sout << "mS458zVeG1d59r1BBc7pxrju9CWB1PjF19pxCUc71R8sh2D98ss2W02R8dqCAT2wdUuEC1WQwpL3";
+ sout << "GGGEYk8jB/4ccjN/+EdYqAulxV/Bp+q1jspEm10/EU3Cpu6RkNFZj310v0t643E1MqzLjpg7ZSko";
+ sout << "FkzcMRM2E7Z7WEzOQwbr4KVgXcXZ+76Xc1ahlvMqwkUQV+/PnYj68Ogv95pj6biD9vweY46XV2dF";
+ sout << "uxp1FG2j2e/nzzFJgFfIcsB1411szIaEqmHTUPBo26qg/EyWg0775HTFSZwlw8iDOFG13OBlz7kQ";
+ sout << "kTvrxy0iwvM6cTm7IqJUZi8hhC/eAX+Sdx8S6pKhOcy1q0fadSKF5cTynAQtrSYwDK0/xYRjVl28";
+ sout << "4oTks7lHxGE3Jdu0+j2ufxNuem5fcEgWFSKZnZGM4XLvUnpVbdjPMEjf+KJ0w8LqDbTbmZYXW69q";
+ sout << "Apl9sQ7AdAUcS0ZhGtACegAyg1QcJUjFvDKDhXwx5d1+0ZKChwYt8FFzqht6nWY9WjDT1i61XmbS";
+ sout << "m2Emc68sgA9VMv1AL2bOhXq44//vlGYj0bX0gj7ZDEdUhCVNi5aXobYv5fn4nGfTNKp8njHyqGrZ";
+ sout << "CT1xO+YrGMY/4qAGiBsOMGtbRrIcFHeeCMKOO/5so80js3nh2fw0L/XNVlY9RJXJA8WkEsalm5O8";
+ sout << "MUDzmTukLJQu6YIjFhCHjT0FA9R2agaQnGFZXiiZ+0GnsRoUSrGC5mdkZ9H6jg7Odm+aB5fD7I5M";
+ sout << "pHzEi++Jt8xHcbX0jyaQd4Y4pdku3xx1MrdgZht2WmCpUNxg66syuutjS6iWKQlvWSdDtlyf5zVU";
+ sout << "rRn2ESMesAxpmuFd4sG0j8Qm/8jzAF0Fwolv5GmJFlwKx5X+RW5Mb3HZdJYg3xou0Uwb3FBgV6aK";
+ sout << "mLA7GQUK92IMgRKKNXcDAPJdaF6wT4lin82Ss1QpWBBnWwLNTslobUbn1TJsg+MgurruWUB9byjb";
+ sout << "56mNxeoTkMh9h9fEuDZYL7svjxh8U13AgnJCgchaDj7iK0TOd6b+sbz2DadE2SoyFYo4XDTbXcpE";
+ sout << "LS8TbCb1rH7G2WAS2a4JKRyahhELZhOB/X+jPRtre+MNSMvoYOFYtHYXjIzoEpOgEGdSPOsLcA8s";
+ sout << "JHCoPcU7bFQcnIlB/HZVqbDZwVzAtUxKoven135MSyoRVu8j8nTIcM0RBT1VjQa0L7eHcgQVHPvR";
+ sout << "uhtFmhtUJJT6lDJmoolIJt+nxeT45+ndSTz16YrT4pODPMuawO3XIULSvkPDWNhQybkF+2jmUqlX";
+ sout << "brWKyviTX/IZsHS/0YMkgXZRX5McLzUH3Vs1PcjNBoj2q6tdCq2HCcIdBKH0Mokm9DLqEi0oY4mq";
+ sout << "kEsB6HFUnYZm4xezpAoY/wHkXRq9Y/6lOa4DtgBz8ng6/WwFwKwCIkT3/aTHzKDQSewYiU5jmQGe";
+ sout << "R4+wtbbRqGm4yfRc7z0xEsMESF2FJnODPSSSBAFT60rKGdGK4ai0IaYbWoWaHeDxhHVeOUYbwUlr";
+ sout << "dBHstgs0/k6t3QKbHDo0KwMv6LN0sYYDK0fD5cLA+92Pf8mOlOAS2YNlR0sG2GrSa8M2gZhpgTcf";
+ sout << "dz4j66DDlLmQaC9UozZNf2PxDFLUN8NEHu2XnfzYEyuyt0BQcTQyqs8DvrayzIwV/qiD4o/xRI0T";
+ sout << "Ma5RvRpagaW5EUApHgu09rkvFXk6ijL+2L+7j+lDQTgkvn+wHYwbTADm/b9tZs2egY+6pSDDnK97";
+ sout << "invYOsWUNUDtgvRxgFTDDrjUNX4xNifDUnGCJrODnhg41dOG4M6ST3gBgxtKkrmuHkcgafuohkCm";
+ sout << "w8bPHam0dnd55H+lj0J74uyhw/LreRAthXdcR3RbizO0rWIX5EDP5Y+JYxQfejANE9APE2zFHg6f";
+ sout << "6y7DE+GVQHO9iN/N85TOUhN5ya9MZ9fk8M2JR/440Q7gqoqNJ7w1PpzfHCyQBSszPQTPBr5WeirK";
+ sout << "3D9axZGpND29nMOQ8iZy/7b+5IinED5SDZq8MTekt6kgW/ScvByBVOeBbxh38DRAcClRnn7CdFT3";
+ sout << "Ey+kaJAYlahuErS/gjU3VkqPKGds02dkPwumMWd1G6s37r4hQdPoO0F73+5dxWwqF1dX75nQ5tyz";
+ sout << "J6f2pbTFnJcp4+CGMYs/SEoPzODOqMbMt5hDqQHZuEekNwMNI/X80PhnIOiMa7oK7h+8cOb9v4Fc";
+ sout << "07AR19kD1T6r53s77u7o8wYLp7/uf7NLT7tA+bc6Qeaiss+JLJznbb0LHa9uSUZ7dK0hvHFAYBjh";
+ sout << "fGqkQ2JI+adCJ/eUQLs9zvo/0GvEV4P3dZlRcuXw+y0uj/HhbMnTYgnD8AUbRYLCLS5Lx00m+Vrg";
+ sout << "lVyzOLy3d84zSmZMR/xoX1Q27biAyEnvZK5vkX9CmMY4lhP3XnEBfcK/UVxxZDQf5HfZIamjAbOP";
+ sout << "ZRyNX/+VYpWja1qGHZmW8Tc3aJfH9GsIKdls9/FKfuydYwfwr2kPuG+PTwXxiHKWWdzUXluLrmB5";
+ sout << "T9UCUmf1mkBM+Jv5r5R7qUPldDHzJP3njQfT4SUrNNXL/ooR2NTX/jYMqkTlwSCM5w7jYa19IDe5";
+ sout << "lYzspMfRfAnjG/DPX7m8H3fYqHXo6JKfWGL9NsuFzFPzbeVqVac5AQ7XcoEDXj8b5OmGu915yRqO";
+ sout << "zs+Oag12V6PRX1w6EKKyhQT9BPvW9E8I9kSKGZrO6rSq9pBaZk5DWES/UX0U7yCSHaBAlYZP2fmt";
+ sout << "tvEMLhzKwu5vjk35yr3Nm2ICYBow40OjKI6ft7EpneSgGbkBuHH7X2RxfCVcg0+7THSBMEMgnBzC";
+ sout << "+8JERFD94nZDWrX7P/QTaejq4L8yWMb7uIChSHDnIIUleV3h5kGuM0MCRChSUPvXUESKuhkezPTF";
+ sout << "KRt5t3r1CbOHFwnuIjB5JAMUhCUF0IEJtk1b0gnCY109aCl/sefPp3yznMH8tDAWTL6G/aJbRLxv";
+ sout << "DpBb20+ZyK78kfTrFaJ6xGgAyi/fkcggLa8ANOwU+ZbVKvVEQOzU9d/8ygn1SjNBASKaHfVja6Ym";
+ sout << "TffBgjcGFtnz2NkjUBccd83WqOyZOxsFqOugfmoavukdhDAaVtcB/KijSoBYQVhNITRC7/RLRJSE";
+ sout << "/obYwojRxTfq//cIE33pXW5RCDP0odyGIr4YVCHioG+li9NK2/xrduroOiLdaKly5eAaFZgiFof2";
+ sout << "jVYA3mXRxwH83YBdKUSwHDhItv7R2lAT0nsfXopxcJBQVNwPvCabAmsCf71FTl+Wn/LfALLRgtvl";
+ sout << "b9JBLI/gPf7tKXx/MhCyz0Lq33VoyOGm246DvalV2QChpolNafxuxK5AnvLxYox3U+53A88SddqM";
+ sout << "P+k7oftTdX+u7RhDmZtjHLYn4ikjLwK+afDTW4hCBGFR/bmDsZvAyp6OOVnnvsj9qSOWpeI0VGQe";
+ sout << "tCaSgoPEwP8GtY4AX+Dhy1uIYPmoXZUPyNQ+ng7MlCk5S6HAzLlsd5giVhyzN4b4N7f6vseMubhI";
+ sout << "/lG0Hfcg+rMX2VvTF54FMPgPjkf6CWr7QorPDB/Puw8c3dt69dr09Q/r16t6aJmtyVa0EzAxiqsR";
+ sout << "n1POTejlH9K3Ul98w2mthFuaFVwk0ILJjCZZVgY1GxlvYRzPs1/qN+VQ6GdLYFaaBWN3mI20aPND";
+ sout << "wcKX7LEodSgu63Da0BBrKBkoihTYg+IbO9m473LWf9U4dZDQmSjJeISGLa7HDoh/JAnfp0Ej3EVG";
+ sout << "K9oe/ewohkPFTgXIV/qg4LDRny+zIJ+jsEOHeryCzBfml0mR+WWwwYYcPur4cCABhkZfKGVH5L9o";
+ sout << "S2pTxevQeY8hDj9dUblfU7W9RBl9t0j6pYm3wP9wwP1yh5AKezedXy5VavBPk82SzWBqtsyTpODJ";
+ sout << "AWQzgF5G6yLvPeUChHRDGedP6KzrPxkSxpyKFZq6dTNl8g+yLg/FHwOZH/R8GesbKKCxjjeZqD+4";
+ sout << "UjB68IayH6ttcLEjncK/rLOWn8lWrnTsQdOf7GznjWDHILJ9B5hxEsKMKqOfYLKosPMpIJFk8UE6";
+ sout << "CYffnCZFI5/M4stj/2A4zYt2kcqIMN5rIBJ0hJtBlfZRTMweBmJjhAKbM3b/ER6jQOlbPBzNNEmN";
+ sout << "DvcgFIL1c9/mqfI041LVldLTpHeaNDRVVXzho5+iVi/R5UfYYTvvDU48E6cKh/ErrdGHXURNYRXZ";
+ sout << "ghNAKIOA3YeXZk0SpcdzxRhQaFwLcim6U1j3kVLEYnk17GavItFxjv+22zr382ivLNfjVdx9vxBj";
+ sout << "bRqj1fy/goe9ilHC5sxCphuoe9vfAe4Le0b2hqn0Y3LyO/lvzJEVbP2hkFbU06YXUOktXJVCH5bk";
+ sout << "JO928XrFA98vOciWwqtRXhO26A5yELcjimSmqZ9zwt+TKz5cG53+CXufCssHPHDSaZCbF2gxST9/";
+ sout << "TooDJUbagLQ7VfNZMBIwDVoLpl1dG1Z7GgobQ70lkUAA6Rlo7JfhNM5d8N8UgM2dPPukCAQ5yuMl";
+ sout << "0KTHpW9a+qp3B3VE3+8mPoX16p+zpRlw5Y+jA5ctu84Pez0FqZcTmM4lFcevPe6C9LCxJGzKJJer";
+ sout << "yGyryVbEzPmVk6nsuWnHFodPve6NeeYnlTkifu3FBfP10DzXYSUO6nPwSZiGiG47AYnyQXmIcj0N";
+ sout << "iSwKDzHcPf5IVgT4qZswNoy+t8QBATl4run7klhzNdE1QSAp8eNZnjtdmZdGY4UhB3/mto70MIPV";
+ sout << "OBRoyQCHkzWx8tPcn7NMUW9lDJdFFgNiHngYBe47fz+Tkuv1iBxRppU32ByeXHOSKMgaTyb5f1XS";
+ sout << "ern+rWMqDivMybInX6vMV8BfQ8eOcnl9MS2OblGPj7XDsbLmr/eOe4cFoKdBKg791eavES07Kr6/";
+ sout << "LXw/mELHEEXBqkuafGTUlUfatUz5OmHHM6ssKoQM1uUrGqzgcdiAe8vNxuMsapJYJjIb80meaYia";
+ sout << "EyAvl8dEYiGSx5etaf8hwCnXAOzEfHNi+qjIC6Dqr5s87SQpYPDexkuUv1hEGSDnEbWuDo7BSwu5";
+ sout << "hZCA4T6yoSSki1hhXMMsl0tR45bm6UlB74VX1IsL7VD6HiPY3BcveCMbk9aNCLt61KYwY+4+xEWk";
+ sout << "smbgq1BOC31buf7xQxX9UtsyBHQig3dCScPoFBm0nKaEHK1fuf8zN2WIi7+p3pEQqRTphOxHtsVk";
+ sout << "QyC7PmZJS4merPZpki9Mh/jo5MM76vCXgESUr91CY5neMliK6mlVDQjWBPenKw6JwM2UeDjjQHTa";
+ sout << "5w1vSg47kgvLT4KUfqQjS/JDx+u/RIrKGZbsj7WnZlJXsBvoFYn5N2UGBj/SOLVmCXYhBfUHZxqH";
+ sout << "GvrtVWHffKoSHcVlm0G0u0EKYYvrYCydASHOegavtcc66k4YIqge/pv8j5lTBiswafWuBz5OwbDe";
+ sout << "zxzAcApflzsfgNLol/DNUutk3QpBTk2qW2XXt4r6tkfCG89NsMcr1L3E0+NqWc1IbIIxJuSBhq9F";
+ sout << "wNvPOBUYmMD08jguGFHHFLFTVAhcxP90IP4NN6/ImZWqUmZ7DsQz27ritt7RAgjR0aXUJ2AipCss";
+ sout << "LWr6mIptUwcOwPHwqwrb+s5xuLIGsT+hXzwpaNrOq7AjKx4GbRcL2UhNwDJ3jAQe8K6uJxffzABu";
+ sout << "x1FLhLe7N63S+rKYHMLSSQMKCdf8uxVRz2NX44QRIdxXpV5/mHXgDd8qysZkv26IGp7DG6Pggag0";
+ sout << "DtuCocsV49swsov9/gw2yR4r8ziyO0oUMOygSA0Uirda3B1MOiYAvCULRc/HyzndANKNjef+uagl";
+ sout << "U9r+o2c2pqApAZY9JeKNj0vbW5mmIZPU9B+/LNGnLAmJvpbrcRTpkHI/0VUV541n26da7jXoJ9y9";
+ sout << "ejOkZUY9xMjkLl4aqgwi4cfg3TJNC+0GXjM7aRlJPk2uPNujEiLSkrPk5GX1+jlZIdBmWSfIkNeo";
+ sout << "XVQf2dVu1YlWKqmRHsgnJecopec6lhGHVIeamgsyyO9GEmQBG0cZ0lkkq3Fc3Mhakcbnw8InWFYe";
+ sout << "skGjFM2kSHQ8ImGUuGx9UW/cKMZbq++oZJQ8y/rmTDfzASJWXezdCOJsOAEAqstGrVZXO0Aoq2Qr";
+ sout << "rwO6oJYlxdCpc4MLBzriX0qlA/kj+3qc/lAb7svIugXusyTAshRsSeKgBb4qNUdJ+31j5UoZbwT1";
+ sout << "LQt6WHxVFoqYzdjnYsdbnSbV6llixlEfdmdg+gyAQuYciR5z4B4D/dydNjmbFU2BT9hbeMqkam/3";
+ sout << "vZSjziXOOsqMnGj5ZA9yIPwKcU9IqvlhxSsLPyFN6eTq7EUV6/njn97pMQhew7hrhXWpoq66A6lG";
+ sout << "5kE6M2y39eZuQnh1GmWhF2Pr+0Y0hk2WHx+h/6sRNVTH2qOm0wFh3g2ZqCPgQSFSdam5f+Jo5yYi";
+ sout << "IFxYEsif6HxxKGg100Yt59WpeKCVjYO26uc+bclSKZ9VNFsowfyCI3ZtFOp5aqZne3aB7tNkqUh0";
+ sout << "387ffCRsqMoQTLLZcycLiIpcCgmOCb8ASCMi5WgPrtZxKA9oB44xUgosSQlRgchUdu5qtROn1Q7f";
+ sout << "PHqeNv9EyJWiwHUyLp26f8pus1iHAYIJWvoFj09Q0vXiyvPM4iSd4tFX7kIXxOSNy3ch1FFHWc6j";
+ sout << "FDiXSs9Lf1iWpvF0O9iqXZcMdez9dx3lnNuXBttsbNFg6vBABk8K3DFtwDVWg6a4CCFlVmucQD4F";
+ sout << "Nf2eNdlVUwwpJn//rtDlvPnZXqpqx8JCEyBcYt0/oYT5vJdrhOKRNiteeUuWf51JxdJh4XKnLE+e";
+ sout << "8JAmuTL3AJ/gp6Bxpi+4P3jAR9mZfA/sQnTqasw32kz427/v+hGWMq5j+Ak0wGrKdTlDeA7KLBGl";
+ sout << "vbZUrSR60wvpL7SJSYkM5XraJzR5ICO+IxE+hfsDR2uTSrzfcsb0M6wnVatnXjOFNLL5AY6jpHc5";
+ sout << "sHLhUHSziNpnsMsWJ4h22wbZmrbO0zeD6wTHvUtrTwOTiPfGkgvz3DLcNBzcn4oqfaEk5JnXoE06";
+ sout << "eH4+bqe1/kZ7tlZLzjd+7lFAO8dlAA1KoIGOvAL1L5aqnPbCuJVaak81GSiwHlaztibjOB9Qolm7";
+ sout << "y/ln+qnZ5nbMYuWnn3lN4m3Gnk692nuMfiqh4RQuwwxzR1FZtiCzGBPkVD50VMzUsxs5d170C4MH";
+ sout << "ER71dvl0U9lLf8WKsHyiU79tJv4wTOpzSMszbf+WCpWZXCJrfyJ6ylRO3dvk1QfjPmFkUwiuQy67";
+ sout << "q+lFfRo9N50ZvVB2bYNsne/tlNHPk9/WdeP1rtTyMBMdl+RfJUf27wduC7RQzMtdUuhFLYxdDGxm";
+ sout << "1ZZBolFeteOW4IboI2KanbMlv+GOAwxkAWC9ymADZ9YBecvz1unjZkLSGzge1MY7ORdnLBA8fyUv";
+ sout << "JsigTjg7KuX2kpkuKxQHYs2l78Tlkdwo3x2VDKEfNegyiZMGMr8CSksLfxnI9dHwbUmyBQWMZBOR";
+ sout << "gWU+s45M6S9+HP3R+lwsT0Pn12HpljhnPde8ZpemkMsTbJW0rCQnYsrdryfL/X1GUxUxhNm5WJk4";
+ sout << "blqylbW9X7IbbUCB3+8A0phl522/wLWusDD9ndRsiiXj2EDO2Ah5MXodILg7FBo8PpHDsgKBLZeK";
+ sout << "WlYVQlWNtg6m/jC5004csHtx2ra6MjSxAhxyt2Y5qf90KBDt/61iJfKC0N9hWm5KKMsGntwWZBVe";
+ sout << "h8eJjOzM8vP4azMfymC9U87PqPuI2/J+IB+JNVnjLsx2z2NumqgAKZc4mVUs0PiNK2sSVRlMU4uF";
+ sout << "DdbEn+oK+iQ7ILBnkRhDu/C4h1eSyAQwF8XENwyYaclxhoVGRTCRkqrOaLh3SSb+YFSFvbDz49jo";
+ sout << "ZcfxsSvLNgIeW0MnmYUH9ydCunC+y9aiO2sZTfKrl95BeX8aEjtxCjpqVj4daFz0diNEQSiWIWlU";
+ sout << "/EqYRDfOIZoA5QkPBp2rioNMsElAHaosb90vh+LiDXpOj5cz0S9cKvQw7hI5Ma0gVmhQbAqFrnOD";
+ sout << "Me/94YZgsabKZ5IxOJrajCoF1WO6GgdJCkdSUrfRZTkxe0y4qBPAbVg46BGOTjnOkSxUUzXAjXDE";
+ sout << "wPf86SdrIWq0NDJ3N+D2lu0MloB/tabrVHmBXQwNDqk24uMVQPBQRqYUGCTzkjr6awEz6oICB2iC";
+ sout << "14KqcsiwuSxIsY2jyL0VG32aHA/XNQTZetsvMq5fee0jzSZWAqA0EFzw6UFW/kUmVEsBdX5kpYpS";
+ sout << "XuNVhtxVGey4zANa9P7nc40VmxDM++pP/45XNbpyIUOPxfcBM5YoHjnpAwZlTTUsBi4Kdx2D9oRx";
+ sout << "PGlK+TGQF9T9ZvI0mxDTXiotRScPozQ8jom/oINYIVzdH7EWLEG+nmKib+3icO9A09pOYWw8TSZj";
+ sout << "HNAxPczCwc98w5FdlLe4zDwHnILl4NwfnAz8x0QU+VyW6kugntyk+G1NvFHIGvOnFnZA/Ku1nqM6";
+ sout << "3jErxhCl0Ii8h4dD7+HVUu7FWUMOM91LQHsgmphdHA+NQXm+/J/zj2WxamzK8pfpnq9SCYpNfjGe";
+ sout << "NOJGOuytsDuG/Ct5fik3DquGqccuhve3sw9H5RDhaFqjWOEPT2rhlu66UfRsyy6Y2FwRWcB5YuLZ";
+ sout << "+96O5+VUhbSynvOuygLQfmCZb4RR7h1x4RLgimV+8DdDaH8abjuhKv9bFlSFUNpireUlxkYk1NoK";
+ sout << "q+3XpmpJyDI4N/qB35pf7RUdp0mlTU8/cF7g4OJ05ryskis2/tURAdoKcePaWSzn7xoA1tCGYC1y";
+ sout << "+Z9m/7erwcD8n+1ZHu3499wbywGzscGo0q7Drc4SxiwUUDSH2FWpaoUmKEUjsYLMwx57QPZVIiH8";
+ sout << "WjLd9hcjaht7OqgIQI7ihdSY5TTvihLif9siWmrJk2kpl7Qpx9B2VSVfFBrie6JU+6Ecn4XIMY4n";
+ sout << "GYVF/bao4WKkobcbSYCmUfTr1gx7hh8VkIpmuPfiyBVHXI1dQtY/BDxkLIBiUIk509Kb/U0xA45N";
+ sout << "+NPG833fYqJ6p9ygAJCQJfmymr3TXL/TxfXtSQrO42/upQob6+KEjSBpewaULLu3+6diY1/P9n79";
+ sout << "gLnJOnGncD988L0KCthNKXM9Rm9hzGjnObcxg2IHckRu8F5Bn298963sUe8x5G5BKCLWTTPFbBxO";
+ sout << "Mr/zSVZjlKaafRQYBWKE3BPQRbLKBYN+XaNuEQ/cfxnsN+gUimbvpZUueMU0Jeo1ReHqZpZnjvAE";
+ sout << "Upf5Lrmjpa0Iyy3irUQ8HEVEisLMdpg1Qo5RwuWanqTVmGxW0sCjfrCOhV3TwIQqxeWPfc8o4cRO";
+ sout << "uN7IkWSyck2HR/4ncq+tfaFXehMA9rxigFkeXNh3divWzV3W+U/SeAAQcrgUgecpcxqmGkQjqV3k";
+ sout << "Lttwa2MF5SCMfM8LwEG3Dl5JPvPfTY15dIUGNjEIFJ25UuJ1vK9K/ns4ifG5QxNKGFXLvgdv2WXy";
+ sout << "0968qdLYeIMXJAv3ieTFm2ivd6OVs7c+jp7k2Ondx5e/WeC8ciCOt7JKyAbJcCZYbasFlaOOeNOO";
+ sout << "tm7/SmLUp1hZALfjTeoDc26EtdeNf2x0HzFsGivu+/qd+eYiQXLGqStyYPVc2IZ08AYLFClR2r+L";
+ sout << "to5+RouZ3CCXiBqoaoZVsGiqyM9Dgt1E3PlaXtgLFzhpXNw/l4FXFKPiqS9DZh0X1aNQE1KAqLgM";
+ sout << "F/VYjhOpbzycTrRhiJ0vhzaQUiCCe/8QUlSqCVVNZ9w6Dvc7uuNNp+exxN+IjPcWoSYDe2QW4MHi";
+ sout << "3Sk99Bhu+5H4tVqc3ehExt01lIjQI/M1A1CpuwGv9Sz3/tspG0hYpOT+Mo7sHZ7ojYfkfLHOfp9J";
+ sout << "gtUT2uqV0LtDgctrD5vx6NEgfMzP2dNnj7wopH9P5YsTv2XPW+bKxUp95K5KMz0Ea2xLmrZhZqG9";
+ sout << "HXhubQS90G74ibC9p5nvDts1DoIC5swJpo6ZzVude5M6GmzjVUHTaS7LSjIbxQBcjyFrx+panbfB";
+ sout << "TLLL2VaFjfxw82RGjjQtaeCeZt0BsnkfL1A4Vb5ItP+qhayO+9lqnSXdECHa1qFTXKDVPvD+SQC1";
+ sout << "cXQnxXvCxuLmHf/ao8i5QFXd7tHr96ioCtng0PWHxbXyr3NQhriX2zbwD2kkrrL4udvZo2TRIe3D";
+ sout << "/L46KWG8S+zpo/iXc68pt4sjfuc6DNIXDtx8xkFAre/q9dRKmuV1BrtlYiIBQlhDByyQuqD4Cdwj";
+ sout << "1QMd3JkiF/WXCc3vsuLH5v54EMHM02MkS0TjzLjOinkMT9s8u+ssSyp0NUE83XRdUoND1Kw8DbI7";
+ sout << "e8izxcz3VIpR0PE6MYUKS6SR1Gq+haGlgnZYxBCbrhSkTRn0VQ02rnBF9An6rmXRHbqs1rJZ4yXX";
+ sout << "9/jj2dPwYrFW5NMXL35Za8fTfG6pXaDjPnqJokXt2rKX8X2/DVWhYQwBLpZytXTc9RpDis4VA6ao";
+ sout << "0ZG/u1qgQeqsmhkwTayh6S+VFpwrQA5upwIL9KI6JdbCCsQhazF7qsEDoc1GeAntjyqntc+mml41";
+ sout << "I1ZCYtP0sSOWfWiQ5yZavEsrp94P01thtx7s2UsrMtrYQc5hxs/J8VWtIIpg9AIqDW7g50qsBv4w";
+ sout << "id8XQPCcfyXQszUJi+w4Kubu+MPwAMiAVkDmlJGAC9t5oXIIwIbvNZHrq3C1ixd84+VqhvZZgw+/";
+ sout << "izIXi9IrhvkcOJmuwxQ9FCyd726/ks2e4H7TafuTXyE1RQ+6Ju3odB4meFU8SIylbCw2jwoI3kTX";
+ sout << "XXqHbAcdgMWoxoDXPZ0XzVqC3utfaOARHji4+RXEseqA3tJhS5CPHj+D+6yXJivrVC6KWSwPqLyW";
+ sout << "6JI3rVeT9B94EYtGH6qWvcA2GaSJjxiQUZQyqahHk/FxSkmnMTPwtvV6AcmYZjQESUWYI9UQyHvy";
+ sout << "5e/ULXf2bVF3dmTWoKOsDchTNOQ/Vtswf5avr9tuz841NKT+1fojuuAhhYP7uvRruvMJqebeiMIL";
+ sout << "QBzv27ji9+tQ6D42+Z5U40UW5XtytFKcGR1NWtaxNkWCPMGRLcC4mFLuQMYq8cK3ZZDhpzyVeyIi";
+ sout << "nLJRFigzQfHlBeJmXm8UccbJYo6oruyFUEagjILnb+fb4y+uE7vN6WoqPcg4vvr4RzSkuHueZxHL";
+ sout << "5ks+O6XyEywn5hU3kqOmI21d+C/HBRjJHbiL39BcHKdzhnHJ2huEtwqSV6SsDiqhxSUL7zYEDECj";
+ sout << "zs+9/3LHJ/9TbZZ104stYMWXIWuLc2NSS0bF5NHarE4XedBAcXaJzKzlj2o6NYb4Ifxcs0shAxl4";
+ sout << "6ptwCMOtvcTz1t5QB1vG5CHrc7emzriP7+H1tukmCRA2i7cZPumoiw81AM7SPv/A/9yr5EFRdBmv";
+ sout << "FCOwJNDKfD9Mfb6U1Lk+ikWFR1wm3MSqq4ZSy1Q1s4F0npfVh1J5nWZTxd0Z/TqDhSAUU47cC9do";
+ sout << "qRMBg/bBPxfrWGeDEOLActWvxMr2g6O4tdWLxTtWcGsGiatS9TahYclwL40gnzAC+4g1yOgc4/u4";
+ sout << "Ye0T1KyUWUnAfpWmFibwphrKOrVj4ZBVzahAWMG4jWzDcxTaKoZvAtQZi2FOn4V/+bKMURZChDl8";
+ sout << "DZpqrBSfSyY7DnPiGRgxOmTKK6/kb36nlas7zwcQvSgQTw3vPJSU+LJg4gKmDycFpl5deILsDRBp";
+ sout << "78NSCvjJqK7UVNfdAG/2C9nQHNk9nHEf3jo4Er7ogKGkWOHshaIvzL98Nz8BHbr115gtKo06mD+C";
+ sout << "0EJGtywTGXAzkO8J3T/gEuW35KH+kOIBMXzQcJNY1lNud+fALQxNv2Rw5u17KBXqLtABWVaOABgz";
+ sout << "IVCkpM90yG5CwxkzyKO+CIxb8JyNs6Shk+CPNoiU8mbqMFigPgXw+yWEEdOMvTdzJuj5dKX28e+I";
+ sout << "jwOz6wcXPYod1yzF0kPUlol0XaDR1m3abfEiTwquKsTY7upWPjU+Q4yUnrP0/+W4Oe6TgH7JvEvy";
+ sout << "fhqLicE67NkotZ1/+AMZVDYWZOScrGmi8c9Yj5dg3lWi5OtfLYg74TSQ8AOiH6jL+wLc9J+QwOaL";
+ sout << "gibI6x/dWW6eJGHxInVjQrt0xmP6I90cDPOnx8JC9tMAEv1MrLMt9VKpjdMut/Gok2doLdEHyTKL";
+ sout << "wBNiLGq0viiPXgm9/sbi3KGrWhOdypAbECm0emMVQK/WdkSFZu6PSArdOTvHthXOvyTb58fm7wS6";
+ sout << "oWvK8J4f/T9t9aNb4lBAYFtufB5S6ndUr0hEU0nirxgfsXl9ZsRpsQidq58dK6l6WI5CmUpYboL2";
+ sout << "6T66Tndz5jnvtPAybpIus/Kbmh3tPsaVtI7e6IRl5mzbEngtlOqfbwlyJyUiC5UuGJAUCtqoKadJ";
+ sout << "QKGtTuLv2VqavDmNIuZ1mRX7g+2Kbf6y+7L42iqivvX0Lmo8YSJFTuNkWTNlu7s+yYH91uC8BdT6";
+ sout << "3DqKRgiRWVfgKuuJvDp72gyJknKesDa36+iCp2qaZZ+yU0XhK/6U2phZ6XeNJlQY5H8JD8pe+RJk";
+ sout << "qgi9XqP8qvLIIv0RkfnDVJtl4V8pi6I6UiaD3dNUeHdN1YHCu+Ub225CFQIDsko0Ly87Iv4oULoI";
+ sout << "1gmb6dLkogavqkQfz69GYinC/z5/En7ekQ2TnOgFiPV3SaF70p7RmyMnn1/ON9r8vd4NfFPXXCCL";
+ sout << "NBSihPK2Tt3T5i+TRXipLYq4+kghSyTRhT2YRIbpvT3yYs1EGbEekCQQmIltN78NpyWqBwBks9Gt";
+ sout << "xpIS1Mj7Ff20DpfjgaSok+XhDSDA8pjNzuM4Hv55IJbF5se3Nj27glEhioHCvKzx84FUuInAjQtT";
+ sout << "0+5hsgLkTppvJTdSUg1hzhU7wa6NixwuZgfUenls7CwH9K5WYB7KN6ls8gzbK0V3QMoyZBRIoSuO";
+ sout << "7yyzgY5erfXbwfcYQ2LliJQQyYyLiEcm9HCxJGMuBL/41LQxxm5KBXMZhJx4c/S/54GJmTMDfU+3";
+ sout << "xceNujbOdtlz9ahcxUBPGOU5iTYBGVUPFscRy0nxuHhOu2VI9rYBhoIUZs01Sh3BDgksu4uUnGof";
+ sout << "crDdCqjpR+1UVnpoEmxbW/Z1Ux03tiG90lCn58/g872xPsO/O/DDX4wfNU1yeibFvWrW57k0FND/";
+ sout << "lNNeLErDW/nBpQSbaWal3O8lfmjD2SBwk3xtJ2qXx538rFZRM8DQJUOfUjvhycRGITPP4GZBuiE2";
+ sout << "cVpeof6Vqbe6J4I3E9Bcrnm3moAyKGiPDJ6UqthQuILGNTH2GOOXAl2I1X0qcJ4G2H5xgLNliett";
+ sout << "T+uB6q/kG7Ab+4QNVm0j0zdPdVZdUUtGceMJViJ+ZoRVjunK3QuzwgBHYr89OqNxvULc7tImHva3";
+ sout << "xan2frwQ2ZzJy8hRCIdaNSNgdZp4WONm+5Bw8/MU0l3PjjXjtlzjUE+rHSwdv5abDy81XQwkDP0y";
+ sout << "CS2C7MMahBsySi7v+PrLljFQbyG+deR6ch15UfFjtyd6Mctr8FcW2umvPYSnl1g2Dl0jE50ZLOHj";
+ sout << "2gRAl1EvFXIuPjxcA7gsrlATb2SgCUtrelJSDOJpf8a3WMKrLRYp2MONLXacFRpkI0iU4fNhbzgB";
+ sout << "o/CdP3O/AMGGnaZN5kIkmOwm+naBEF3mv1xFE3ZZ5VldoFrWrxZ+9s4lOAScPYzVpABjbJxzOzSo";
+ sout << "fz7+pEOY7/OY4oNgqWA4MEtO2pMbjUpdTbSiVuXD87KHJYB48ffxkA5aPKFWpdRFCJHM+J2Zjkaq";
+ sout << "ZRRUeBZVzo2rRvFBWrpbvAkkaXEDk+k+N3mOn1LF78Qbm5FWAn8aw8QL7oQnmrkEqUj9EAiYTx+q";
+ sout << "v3oRQ/x10A2LctzTyy0mCuM4bR+WQW2h8dGoMUhjJpGPQzeR6owMDi1n09T84aVUDkxfR2PpIHxs";
+ sout << "HBag5ryae61mzdzqNmyFzySbIbD83r7ML6FqMCW4xpWF6ru2DvNykb9GYL/kquN1rjNOe9XYEQM8";
+ sout << "pyYvcipUqJdEeYj9sjgJBXkuDr8fCBpmvJuklMOeuAwAMXwxL1b53VM0XNgVokPmFTSF5zqesA/p";
+ sout << "ah/7f2URRjrP9RR344YUdzGoccdhpNnFQ+g4KRdmd7FuPTJYePyDncJFnVZNT1TP026QrRPzg7H8";
+ sout << "sFMoZQuzZxjLy8gSU3JYcBCnfqBdcvrtF5Wb+pKkEZxROE4qURll4Y7zYVxPv03Z2PvA6eovQyap";
+ sout << "Pr2jTJPFgyEiAtcdsluxtc7NvY+B0Oxgk+oUyaXAhkihKKSGT2pPRUvd0LqDYU29/7v148UhItLJ";
+ sout << "minqiK/TinqUPHOe8oyljHRIO6awrl1oc7HbFs/8ZrK8hd07LG27lMDcrBogEyUZ+mcBRykohy5w";
+ sout << "vEPLCUA/hLP4NHaUts+q33Wtrq5n0m+4k79SRjwuvmcSXjf+Uf09CQD32oicQr0UEfo8uhrUPwta";
+ sout << "WukfxXE/VF/jfynSXwEomRZopcNoc20gWzRlk+M7HpjY1X3TCi0T6f8qAJuTjSSbUZTFC926jhsk";
+ sout << "a7Z67jTJ0woPzPFjK41KLWQcoArKTuqOytbd5fKao7qpxiFnUPd30dq6Xhpnt8oW2QESiOPq/4mT";
+ sout << "Bdtd+wl8c3bzgxRpOGBKakq0iI5Vc30wc3vTUw8Mpae/beCHolCN52hJF1LAKRJT4xYWMHLpKwNl";
+ sout << "ZqpKMybMAGM7O5X3xqcE4nH+bPkpkMILmNN2x4JfWYfy+667pQdIed0jEaDRHueSfyuXCp+Yu7sV";
+ sout << "q7CTe3bwrUSklM7zpU6NxuX++oMNuruKzZ7GwsP/NkepKp8MGUyoT8EOz7yIq7k9z61OLxz0Ylxr";
+ sout << "0irk3K8oCPttZz8y3u4ZmkAGcYOs41JR3Lu7D+/V0hjdGtNdb3ALJON1AB7r+OQ2LW3SEP87POnT";
+ sout << "R3wayKImuVIpIjZi+yBSMqomZMtH3nExOSKTHzLzkSyefGR3WTSIXI5GqtWNetBTk9y2eeRzbSu5";
+ sout << "+7c0XsuaWRQd+2qfns49HNwQocfbM5kNMKa8npRCg8YUBMimQiihwEgM6NJQWJ3ig6vwXncIOAYI";
+ sout << "kwcYWkLBme6vSiaeyLRG8y6A78zeoc93lGy87yjRWYkr9CeARb8B3tHX4E1APF0Dcs3/6MTZhnmv";
+ sout << "eMdEj9TW/MFATPy0BELuV7nLBpSrH2Intvo1jzPU7dGsw/36MbHr2LFO9a2PrZkMBCixQ5kApER9";
+ sout << "HQCKGangWrdU/jO76/8j+mcrxFNUEc7y0OUUFK8ZXwHMXQp7MOew6OVUmRuFSl/jv22Zdz16njaZ";
+ sout << "Vd7fVrgOdTG+EXRseg0LvvzZ40qWpRahwg7zqP/sAWp1ZhH1yJVoalrCFuTLgabjWGo5Amp4vyk4";
+ sout << "zzseJ+O10oZZrWsFOg1lbPjs4j+U1IhQ48qNS/zk4Y2kaiM7/0q7PkaUxdwdcdHwbOmp7zszAyfu";
+ sout << "rwXlt1PBiRyEsBKKKsBLwUzP/KJEoRvFvgc2VgzDxxe7pOsX3E6+ey7gKt08W7RNTTGx70YF2uVG";
+ sout << "4cEQHQNqLygscv4DllKnH7Jz+2cHnSES9us1PaWqRwpQ07XG/sSI6t9o8P0xMBVkR6XB9JkdKP1o";
+ sout << "Bj1MgxVN8pMbv3ANGQzTbmCQ5IczVK1Io/OQbSEVlZb8ZOatdQ9nq2YZvedMelM3C+t00b5AaQa6";
+ sout << "DSslaJ3FWzJLguvwYQgYUKvJ+jOC8dMr74YNAxt06b5RaijSib2TNDjnZ3tIySixM/dYOnygj2QA";
+ sout << "gubdj4tuvNpWh04VszgcZd7TbLntvd2IA8b+FDDzMkyT067BC51elULM0CWKoZgaaMglt3LqR43C";
+ sout << "H4TQMRoitKUyfjAPE07UmUi6lEGYcrCyawT+Si9YQy+T5vGMpryrpy9gaHAmFQTGCJgit/0g/c5k";
+ sout << "mbyZ4pilxX69VO9VHrc23YN0CMQ0W3pP/DpPNYQTcW3jeG2T3d8b48B+iY9boq4ONEGcx6y4wWok";
+ sout << "SuOSzK6rTR834/xqlNGMLNZWSOFjLdq7G4oQLmVZVRIkUXrQImDl5W+1EjdglFE0jAnnuxjT6+DU";
+ sout << "B9VIMbIzVlVId6xVrp6Kg+Fb5gcxGsKcpH83c18DbuuIoLsywmIEiWQfrSV7bPVuPKJVof/t+Mar";
+ sout << "Q32TiRHMO2ZsUXC8w46fkI7EsH7mWMWOMfhkvnY3AVoAj2i6+orDHNwnTmWQiGj7FR9Aw5CQcCbh";
+ sout << "W57bPF88lkQMxdEHBd0Hk1c8Pm+p3x71xzdIofXjTnCd71R8FIIvoduLv6XQttPFatTHY0CeRmjI";
+ sout << "49yWBFJ+w+BSEP6cnTQeSo7uAOyit2S3EdLudn7XEBr9eN4wIVAF5lhti7HqJqV/pfqpkgaAQeEv";
+ sout << "mbTG0L3+6Mr4RPAKaa1H1aQXcYT32jdC7w4BKaxvACcyjNuukUwhi8GxxXxIktPrOJnwNCs719MY";
+ sout << "TbFkKOxloVafybwRZx5DdrEKMq+jKyCffePTaM77bvL1hNyK/78Q9TdEO4hx2q0m853zlnibtbwM";
+ sout << "YcO9adPVjghDSU5amF4Ul5+rMxroEoaGKLW8+gL9DbmcW6Uj/vg7TRQohnGbzmKwPWT0hu22ID7w";
+ sout << "+ILYIhGhG337Pr0Osu6m80nETubjTijMQHAb1/Pwv2eNOOdu8X2EM2awT+qtxqHCq6il4YFSbUdi";
+ sout << "+i8vY32x0cXZ1EdKfgciuKtK180q1S57ZNZTCwe26z5eLXomvjtxLxBZl5z2TJyPEnJprnZWNOo7";
+ sout << "5nuXsYypcrV3W2KlRmybUjRfc+/6nMKKBtU8U2phCUg1uVbzAXgyKJuGRmwxUOXXWeiS6f+X02yn";
+ sout << "ZpJda7aMY2VBMEuxMRw7P+S18C/drO223DaUROCv25mTOZGKRI8iXQYEa9UxLvXgRfUB+eEGOuAS";
+ sout << "7N4NVz2kfzGWlQdKqRoh7fWDkM6R8J3BgWhXvl6dacJcZyeRQGjpR0ZEKDFTtLK8Dr9NFYXk6Pnq";
+ sout << "N9kMrQpwK3bDmdv++XISq0WHnf6SBJcNhSx74TUlUDBxsYAcoFefI1FNbFFobGmT7lEX+L7uQNmS";
+ sout << "/TMxpzGwSeRavNmjkanqTTZFpWNvRXrfsod+TmJPe53ApSSZpiuOAedN0/3AGsVuqjNtcxf4DUCZ";
+ sout << "13v1FAYWluQfSXtsM0weQ0q6ZvhM2SyX3VjNleA0/X/PPmOLLK423iG/d4m5+D95t3NRAbU+TEUp";
+ sout << "Nff+BWEiVSlWCTwbP7mTaJPnAUWND7rjif1IIrQLL9IdyauKUOP04yMgCk/LQs0HsDkLxSexCMHZ";
+ sout << "rK0S8Bh+/2V/dNF54MVJEqln+Urz2REIGmzqSjly38FZqDwG8Rn3dPYImGASFgGgrCAbYza3ZyEb";
+ sout << "yaPzMiO8wN+1F+DbMDfvMtlfkY3fO8LvJ5csTKK94rSw2erFACsU0cXqThvGma5gOcpL1H9CwUsa";
+ sout << "zKLfBclmCCGrP4rX1/SN7OBpNjbrCHZnkEebf2TrzxT2moacNtJt1cwVUVGhiJ/DYdZawCRUE8IS";
+ sout << "IKDq/tSGREdGSMLEwJdXyRk+4LyVF1V+YF3Jp55yFh5skh3Be2lP5YY4+YiaZDb4IjJh7qxjjO0R";
+ sout << "RmvES9HZTw0GhCa3ep3x8dvbHTp1NB4//onnMx9JheeyZRZOdEK/Suw5BlXNgCvc8UJJR4ZLtyuU";
+ sout << "7HxX7orjWgkHNdix0VnJKSFYfn2uJ0LTPrz3sARjeVnQ3W0On0DIXqNe4thTZDwUFWUKklmQGV2d";
+ sout << "bgVLPmuKff8DHP0xgGyiy1kHollAqwEUzD6bW3Tkrtu2f+LzTN2yAQxKkVsiW+sqEKqUtzNoXVQD";
+ sout << "36tnDjAFotkirlS5Bzk5pXgDpbAxFnVsTDq/TcT0XbIR7QvBd0bXq4am1uoZjaLGA6bF0l3Ej6Ub";
+ sout << "pB0ZMs2nLo/8EHNIgx4cBgXL2FN0DWyb2CzchL+jsPU4s7xBYuVvGA1bLaM/nlRV7DohA29pE5jw";
+ sout << "MXQPYIDzraGVsOdJ0ghhlqqnwPgO0oCaGDxIlBehjzkJcgXPhWarYGGBdk19SUjt9y+aqDQmxShW";
+ sout << "PrJagqBImi4S4N99hOHnIxGa46BFlk72ddnCvaJ4n5LahIZXXAhqOrxRiCKGKBX20/vzOh2ICqUn";
+ sout << "MgfLtH5T87Mho0k7hh5gVFpYktOqOowwqPJp7Q7Qlwty07GrnkriILYvkp1bftSHHyepPxAAgaJo";
+ sout << "jeyuGTtsa+K0jHc8WaDSZncIqmuKs8oVUZ/1YD2EkVlVXIh5NjyDc6RFfYhP5/w043WKp7xqeRwq";
+ sout << "zaMTKjrUr02omLUduz14J+ka9uUErd5O2xPaV2O0LpLuZaMjoZ83ZirW6U6ipV6d7c0B3kC4CcVk";
+ sout << "kUdzqBoH3VVbTav1vugceepwBjBiXsRqReTJBzNRJK4PPu/ApBKQKp6jyZoWCfhJiuJNB1sEjCbc";
+ sout << "o6XEZWbBEjqRaCujvVzo/dLLGHUUOXyGA8sKJq/iAM2dqvDoaduLBnCsp2oBIFw1q0s2psDRLdvY";
+ sout << "KiTAKEAypmqRB37gV45Mrmn0zYeDR2/EsWDoo1HFu7Kh3FX/y2fL0155dtiBJxkHDJIXj7POIea9";
+ sout << "Bl/qheUN9FwqXsVheoacBp+Uh9NulkH01NsKCyX3nV9ec1sdzwh8hj3zWZIpD+ATVH3S9Hh0pt2+";
+ sout << "FHzfu7z+YtwgpWfSqim/C0/yfDF1QoRws09DqLTke3TmnvxoRfkdOwsc5y1s4YR72xFXVrmIRpkW";
+ sout << "pqHx43/MxHaQ8WrEABai43+FqGJmR70fvCNecLDPm0ZlGySkOCrfgMH9lK018QD+DRu5D3n6alPI";
+ sout << "3bEqruPXlP/N8eNnJFEeDbxQ0aQFJFxSOmTkYWr3LZBjtjX1Cc4qujxcCaUH2hnKvdyxejlt8fEa";
+ sout << "h/I2aSaD3ejPS2xz8y5D7Ve0mxx6znMEO8E61RbXztaIRzze57GvfwpTa+NbAjtX43ltW48u0yeu";
+ sout << "5UM25BNbXDdmfUzzVFpiaflImZj4Ga/k8TjlTVURwRXX1GzB7lfdEukFRdQnY6VQcsjzo3O0gKDA";
+ sout << "+ZUtku8C2JpdWVYYsTVoyR+5rNlgHdaGqbqzADSrSqNBBs0OLaEH+pqoVvN7gNNk/hCHN7tvmi8n";
+ sout << "LZQHQ+0Ukp9p3bPTUapH+h+NJ/SXUHdjHrJpaFJR+ANmN8a79zacX7v09ig91M4yRTSxzMzsbQef";
+ sout << "xnNKm4+2JxpFLH2fmM4FM+Rx30KcqLmUdBGOaJYNHQqBVJYYMNVfdRCo/KwBX7r+V9SRlLQ5ijED";
+ sout << "zKbJlHi7LyjYOq0iv0R4MIiW67OR+5O0aHeoXQ5RJ6Rs1Xytaiipn7aXphPVPo+fZ2xH6nLTQfFq";
+ sout << "fEutvtOrYIR8G0dGUSYsbZEhdPC/gpSdHsO0bOJlxgXhNGNeOFtYohwIKP9Z7Giv+lM15GPlxnm6";
+ sout << "usrSaqlZWs75RuyOn7etmGBSg86+J5wE3ihgVXcsprkgs/y5mnP9iPifs1UBfDO+2L6UnmceygSm";
+ sout << "sIOIuMzGUkrM6iOLYHtp15txD6bUYZsIEeCCnjaIJujQc0lvbU9bh5AVae/HG+SfuWqE5nzp2TD0";
+ sout << "jug6A2KoPK/ufGzjCJJ6VIe9v2bG3WXApQGWyj8lWCO+d6YtKFA4agCPTv4FSnS+Ip3Bws18ZKxj";
+ sout << "M+AFJKpr7lmSJURUl38hITa//w1TKSA3vSoMaEFGEH8OFkOCDFOHoGK/z/ievfyBkC0/WgvwcSbj";
+ sout << "uvaOd+M4umm/BCNqcPjZ86HSej9mSb24Kl3Rn2pxywK74b8P3z41Z2yVbRIjK1Dwvo6t0zPp8vtD";
+ sout << "QHW/9v2uZ0VhBdbRAL2+qFD6qKg/sacjR0kE/o/zRR5vsW4K0efFZ8uerG5hLs4rYhZPtiDOHr/M";
+ sout << "xOGIfBpL30xzMdMs2zTnbXF7wECjcklZcwlefbO5ubaT0doKkVSZ6YkCQqVXL9LNVNHqULonUgBs";
+ sout << "Xrbe3KKGFrToxCljsw4+3+RUVnNN48CklcKfEfTHEXoUNZ/p5msufJct4JL4K+35Ecfauq0GOsLX";
+ sout << "sTkVaLfkelv8Uiju3VEggHb+lLdrY7oKT7aC+kyQu8SjtJQY7b2aKQNELwrX9u95mPNZkySc5vaA";
+ sout << "xwL/6ar2Y5/UA/Lh+5d4hGSkQ0PbSacBStFaLzJ8AVdOcXihah1a1ddYNtfH6kNsck9XJqz9Y4G0";
+ sout << "YdFr6DGHNLBlu2ksVL0ta/tfPZy9BVFo6Vz3tbpnBwkPJ1vo9EZrJxo5GJ309oeLz6mYG+0ctXJr";
+ sout << "PC90nYz7s4RxXkWZBzq3RbNiBGNgP1iOU3x9INOf1CbKBhNSp3b5QJDfM+hVzL0Xd78pBpO/XWQq";
+ sout << "hdl2dL5XZtd3XYslKjHDaZQyZgnaoWD9hhRlk9lhweUSB5bFjHOH/FZjlAH8et4Eri7N9Y/OFHFP";
+ sout << "+a+FoLLT0YiBGFsJiqe9udoECrxUR/A1Yi3Wen2xOQv+eMMQ6IcvA5xYONHWrS8WgMT/I7oELwnx";
+ sout << "GK3kwjKy8lQJ9HOmVa0yuQsV5bbrtjQcP72TmuWKOcyNWf9vHvLbWSZUDEXchz4HfADCJEehJzh2";
+ sout << "vIYiVvotktpiC/MzHmHoTw0Sf/hOT8AALmyPqVFGrkZHhLU4GZTqcJEzBNUvkvuwo3qMHzOc3zfk";
+ sout << "VtoQHlg0pOcKczFo/44eMRlpf0Cn4KTcU6vEmUncnvx1nNMHEYKQNGp/IXHpXp+PN7s9zFDFbQHY";
+ sout << "iXURtZZ/cF6jW5TshrT/CgpldM4oi/gn3yTm8AJoqATWeF83ft0AphzHDfz+B0qf2wF9Te8adqcb";
+ sout << "ekFiLrNf0VfA/wMCE3drkqLhZKO6L99EK9SZqsly4Q1rwLr+sIgOrI5v5g82DOFpY9zL8Sxvg7Gp";
+ sout << "03KDCEsW/2eFfxNtGGZ2P6JJOWLIe/6Zsx+1F5lcbYxPhSirbDYp+1bMZvbm4uLWHttHS6e5NQH9";
+ sout << "pW9+qFTlk68lJWtpI5Nzy2CutML4mI1uVDgIctZLSHZOTlWW3LICiFxYVLjvE7JExUZMI5J+3VZx";
+ sout << "OTTeFGiuzoEveGJ34tOMrHxoj3dIMkRN9kh2dN10crD8MKoo1sb2EqZvhKu3yb+XxKKl/RkHTuBv";
+ sout << "JZjCnoTnGa9JltcMDgzO6demBWSN9950HXwS0cZRcDIgG+eDQnfSfHGGJ0UuL9ydA0KYWYAPxVMt";
+ sout << "tqR+S8lKMzk18k7FuGBsFjloOSgsychm0GSTKu+AVUJJohpnT5KhE9Z7Y1iSc1pBOxyf4iS+FF5v";
+ sout << "AzB7BEivwSPqcjPplzSQ1uGNZsa/mCfGy77XBBm5wYUpKDshd4l8mOHPFofPW9WrpI4X2e3LuiMJ";
+ sout << "8BijYVutWZv/8BrhhFuUnngfLsaJBhO+KL1m/JsQEvWIJ53jPt5qgEOn/4Tei953J2hanoVzKelM";
+ sout << "l2OhgHFUxvIYGvKMwLRMPQ4kUH3C1vvBJivvyIZ/JWcHRq/0CCjvg1aPUXSXhBrgkqFwQduoNW9G";
+ sout << "D3BowPV9v7Mk+IPddFxHdQeyHdaquVKxWoWHzlDnv0gbzlv/EdZVUd7k8rbqYgUdPl9os0dbh8gq";
+ sout << "nRpOW6MTBdFojgzrdOHPhdHM+Rs/TIntPPd2YmryOjdHhAyrVgFTS/rx+yDRrP+ChjTPAF+k8lG+";
+ sout << "qvN+Z8gDFq3B2vfIgAUs0rbSMRfOahM4+EH0pX7fe9ylY9yPHtWKs9i3mU+uVTgB5ZtRKuSKkpnO";
+ sout << "MgWOp94H7Ui/tI7b7hAC673dprCx1PZgdIncutPRIlxzv8exEVhjhH5AsW4gRNxLX9pukC5fVdXF";
+ sout << "nWif4xcZbEcsW+mm6IV8NE7iokkhEC0bahxpHeS0H0RB28nZY2idhHP6hLDMkSg9b8mlJgqcGBtj";
+ sout << "Q2Vd396H38VydtreELEV2FgC6Ski/PFP37rc3/NDStzcY7d8twoEiWa1msraDn7zlvORuk+Cu/Ec";
+ sout << "VaHwuWQdyqf7dhOhrH+/wK0RTPMKHDyLxkwJmO31Ka0LEOvqMcsUzEaxAMUNz406l0r6pLW2MbP4";
+ sout << "AdGu/pH2HwxZ7b1ps+8JiOu+U3SDrmhMZH7kS8LUhCNpH21Ty6QzgJcaw2jC9HYUoNeobJSQaOFu";
+ sout << "kiOHd69hw7VbD752KMiVur1AYmRegfhkEm9850zS/AjT+SntphKTtFNsc5cR074M9iucA3cH6Dv2";
+ sout << "AUSKvl15U83cbs/B8NA/4iSp1OTEb58O97rM0iLsWyBLSq2Lx5ZblFZeLi/rPkoVGMo/o9UV8pNa";
+ sout << "Cy/ushHj3CMku0REQZQfcxl3OgQ6U6avNywmwdncjoDCwX2j7MwoZ9SrAKvxSedrSkdRhMvVkuJO";
+ sout << "l9c6Mp86g8KafJQIp2ZS7CMC16uoiaUZCqdcdIEsKvzMreCoM+XGxXE589e5BsfV2gTuxpvnWVCs";
+ sout << "8BaKUgLnLKCHzyioUVC7bzz8Ov1/Wqa1LHzsrobPw7DV+dIDHqAS0d7TRra2q6rXAqYA/0P99NTp";
+ sout << "uhJkixu4ACmGbuBO2ob+t8cdczxIBSRPHHZNCXQG1t6A1IYpozEWiFjATpoD+gEL7Mv8AZW7AF3Y";
+ sout << "wFQEt2JapH373jpvJ/lAIt33yTMgyTQHKQ6bsTOWpK1EK8NvnOSOeiFfmtseS85l33K8vl8pyXBL";
+ sout << "Fx3dqms9fdbwhUmUWGcpzynCrKyV6DVGFznQtpr4QCl67Oz7RaaYXGe+lOmyrBgrnjKcQk4ue99C";
+ sout << "ucZa94y9SaRBTFNbXtpRm9jjdzoDmtd0JPCGzSACGikNArk4gC2AVKMJpFdJrS3e11Ew+pprc2uR";
+ sout << "fq/nf3C9WQ9lQ+KbpwEokeXHWNmWBwDZwFCf2w41zxYqjTu2XjGt27ea7nleb1IA1PtQtcSz/qoC";
+ sout << "6A7G4EFk2WE6VNL7/Wv81DjrdTdYy8DFQBzab3Qc7T0+T+aNZdNfoZSj5YK35BB0mmPbxw2U4sfx";
+ sout << "1nahYLG8OEQgoOVG/3vG+EmA+iCAcCBHPObthul5KdW5JXwPw3AMgL0UL/3GIpXRrzE4JTPrFfzX";
+ sout << "zFmbikWuDkDw9sP8UNfHAOGWE+OorN/Y43rcSfVA6xNgMhqr0pBbvVTIXbeIy/nR34IJGR5xN6YS";
+ sout << "G1SriivqytwRmJLC+Z4OoSVtqNPzPvUBzs2wHPF00gGAE1FpUa/CfxOfXhbx8VoZWMcuysBgpRCg";
+ sout << "Xne4u7qvre3CRkuXLazM6qKDt47OA1is3gG6ZA/XCvcI8EA61YccN6zTpFNT2cBGQNQ11tK5jqEV";
+ sout << "JeONoq3L4SCF6qCiCKMg0nsc6sk/xJ5Tam9Rl292lzxTV1Xa3be+lfSjUdiWnmbU4ml2Y7FEVbWv";
+ sout << "4A3WHted4mKb0HiCz8wgbHrjTnNQyIntFcXuNAUUzri4nGsV128EuJrPmbYXqWuKIe1wPT2Lbco+";
+ sout << "7qH3xg/zxl+t/bqlyqghqoYQMPw1FJXz7Z1TMI9TNbxUtCd+J+mX3xkrYI5AtCbTZh3lMvbwWJZa";
+ sout << "4AOY4WRavT6XHsBs7hKl0iHOmTyMe1bkM6sg2BChZ2B4T3BoeFISya1Qn2U3R0Y61plw8OFrTsAT";
+ sout << "FKgD4pSc2VaV5bOPM/vqg4B1In01SOw4itnoIC9N4+IvHvzQFYzeqiueAcHYGss+uaTMqqc9c+gz";
+ sout << "cgiJCv480X1iiauIKK1Szm/06GiT5WWnkyqWEqxmNyMPrj125XXg75AZeRDazTWXjObzZkJ7UA6i";
+ sout << "YQRFCFH0enKb75qIjHQ7635m/Xegoboat9VCJDGGVNVG58hQAVMAWHmnIb6oK9t1aaqXmAYXXgFo";
+ sout << "3uchUubvOjQXN+txW64ZRXzM5lAw4TklqmhDjxruXRdddoBONkvSPSh/XiDTkdmO6QKTCh/t6NLo";
+ sout << "Jb9Z7nfmQUd4adkS/+aUfb9LNOPd0GHxq2O/y9XXHvRTBASUV6xlJzpnOBAtTJSDrbLj9NIhiWpF";
+ sout << "nsQ+Nq9aEthi7RHvc6rujDGF1gqfJjsdPUTxveNk22Xc0fe4Qm+07HEg0HvhfFWJdFJJHirD5j82";
+ sout << "bZ+QZTppInMAQRh+dxZhlCzPKdqS7WPS8dpXp45xBpYH4GsG7SaRgbCBm8R96j5iSW3oYd+ql99U";
+ sout << "OC5fYGDbbGT8zl+mtYi23CAV6AY/izASsZHq7KzSV5taKzEZyJXf9D+wRaopIZ5JPPtmi/fr2HAC";
+ sout << "XyKoAPNAywLXUTtDezuKTJiyy/DBlHKxloa3uVTeG9YBJfiloyym8p9GsCOcz1p0s4K1Oyph1NCo";
+ sout << "CyklDAHvlMNjTA4hkWIHTyIStVmgvVkNN93qpPYYNnV+fIETW0tvJDnyJ2OnHVP4h4VBWbntk+uE";
+ sout << "JDG+DLleC5n87Kk+e/lurPTik1Yqpicwe2ct0AgA9XJCrDO6CYGT7QzFhEujgOYqzUmFe+KGn9u8";
+ sout << "4QMY5HbYXY1P9V3pR/QyoyxtuZNU/caTzRM58Hxe62q7BHmsL53db7cDUE5dYVR3Lx2uK/pm3QWH";
+ sout << "IIoyUhcLmk04dLwoi7YAhnao9IJQ12KlFISFoa/3WvhWbb5M/GttUDXGX1A9JuGrhjzXMIfGyS/u";
+ sout << "wugpWw/5SnWaCis9sNGmmMNC2rniqemMrjAUv5mGPYA57KyKzvVGGKK7VM8qyrtwSKOQ7fWRevDE";
+ sout << "NVAwhR5NnzdQmL1x2UrFcEkARtk/8RU/tfy3nXqg6Bi8g+myCn4hH4BOtK5E9GItI4R68YjNDVBR";
+ sout << "MiW9KS5gXcigTV1fXrChyjn4Enh+GrdLBpWDJQr7MKSVSmqGXntmhyTAD8Hgi111whRmKXgwi7OC";
+ sout << "k1Jn/NAURbtbi18YCjCc/+NfStTpuMv01C7+iZSQbO2rzP2aqaYSSvIx2e3/e1fWfZfTJBMQzcfs";
+ sout << "zITZqhDilOIDxPPYv9xAkBxnwGXIkPAi5133yWC5gcNh9hzEq146cKvQ8BFi1RHGL/yOiDjADrns";
+ sout << "Ky2PGu1fBfFNfw7tqF03f6zNB5IVWVjmnKtXhM2bIGqI/EM6Qx79bcYKYXcFg8yYbTc3z9KAcgp8";
+ sout << "es3R4Rwfg9irMdI0DK1CyYnpoa+zWhUXUzpTbSlFx0zZ0Vfx4xnPh7ZJqJ7/U/QNAU3lUP8LXVAP";
+ sout << "+jMIEj/H2mx80xaTNCDp/Sl+QcU1jZYx1Ycl1B4gH2/wsYYr59q/mDD0gE70hQpsE9ODlZzx9hoE";
+ sout << "HFRmDUTI0um1u5fLykqAJFVPcfk920vT5TqEZSMX0WvMVxqvN92IUxy2JNpXXMlBiekCZhEKKQqg";
+ sout << "D3MqZCUlGFkFpO/zc35vlTBUCuTvbcnak5HVcREFV0yNbTyukhEJzcdZArcCZs0xIbM873u1y/mZ";
+ sout << "+dUcW4zZJ4jNSa/Vz3W6sj41hSQKrQfPyeeKpFd5iKxJPXDxV2iuv+ZBgJi584KVpYSVOUA8Bzi0";
+ sout << "0kQh/NO5BdU1qaekY5lfrpA+O/tTzkTL1ZslaaBimJapn5KZcTtsZR0YmrE0E5okd4DxUccbElFb";
+ sout << "81SuaH/YCvib5kUbkXA0ooelAU5UmmMCZ1ArVVgwlLti65IqtSWD3LB8rIOUT+QKutHVEOpNMOrJ";
+ sout << "A8DnPVgF2kmeOngOlOwb8Hgpg9I9YCuu2cojbmROM8BZh2V6E9oiBTeKPgqgZ2tkgqGSf27SEpPo";
+ sout << "xHPyKMhQewkcq3WQ132hIb0s96PdEfniPUSig2MLn+5qBZDFLHHuB6K+pl/6OF/h7ZaAaCa4NCA9";
+ sout << "32ecTvY2MzUHmBM6CI/QKLRtBjmE09TB8s/5/X++k3cde6xPE77c0/G6qLrOUKFITVb+iLVADv3O";
+ sout << "zhj95j7URImm87lmo9ZBDTm6bbKoQtNAzEpBAiGs/VDuFfA5ZoQcwTwEnhmOs/4bT4591uwosx/T";
+ sout << "G4whvzgfcbPt9yHc2nnSSrIdYTSAwFZ1Ksh36RgvDXxaOHozLvlduvPaJkYSk9HijbmULXMUcyvD";
+ sout << "6IrzOnn/8NY6m++hE3KxgFM+hlrlAp3qQBDEGj5FAYFu7AcCT2r7YB5kiuwjM/7J8KyJYWg7Z6Nh";
+ sout << "zJbYleW8QA54s40d7D6AmOBX4+5Mx4e0roOM7WEFoqvXqK0ebAOrgwZ7MmqtGwe+5/bqkfNqg06m";
+ sout << "o0UCVDLmF/SzqAsxgIJt1P4/l8HzWtrFINUcBwcDY4ZhV1fbWICOmej8aBMJ4j/iUHLkxbnzQFBr";
+ sout << "xuNgC5/5I+iLjiYTd5+bdsITz2aRpXgUHx94KdOtcdEJ7uLjUeo7jeYvQVt50x/V2thmd8zHebNA";
+ sout << "6Yj14+3dHQH8c29EghfihdNobQHSyw9eVo8RyVI40oUxykuk65ILT1mLbnASJcKIInvh57YMVTRv";
+ sout << "rpPmXm/b5ZUfTMbzpTwO5DThvD+TTGvLY+eC0awfepSJj9CjiBTqNBJDbd5yKVjH+oM0wTufEUgP";
+ sout << "Fr0fondoPNTB61iwLzHrGVbzU9Rl8rsuiQOr6Yz/Dj9OjgzyuvDb3zMilAEcLydUFmkZlq3uVCl+";
+ sout << "LTYgz2tAQIl0+6SWZCel1bGyRKY50U2bJO0dbHawgGrUgZ/OO2k2HJnC+0+WKZeSj4s3v1OfCZbq";
+ sout << "F1nQDtk64Q2X857gH4kTx3MQ8gZCok7M+sd3p3eA43nUuis/SwComUQODyRA+TmuoE9AdASWTZQw";
+ sout << "Vz/To25UixKQtov8has8TRMEPOWqtZGHg/1tGGyZKkTbHQlulKwyb9uusuzC+FAQUEtpTcXkQzDE";
+ sout << "RNHKa/I1/zTSLVnaADsLgrQA13Igh5w86Bpn1bFgE8iEW5zQoNs6KDYwsRQ7mMiJI75ylVIvfbN1";
+ sout << "Cp7tpGCKCL/8xtWWII5ygWogcGhi5abUDxG0WQOaYk3+kPU6w8OPpbaT7h9TDO94XhcwUb5q11O7";
+ sout << "LAKIZY5+/7DhdVfMnplHpLNftDihrGwf8r46FUrZ8MRHAREnHSsch5dYY3boGxsFoNLc5r+fbJVD";
+ sout << "TxslZ5TEbJCFrBtFqbT3xuLReyo4FsOPZd0N3ZtFXt/ChTutUudGtCLfD3ucoJohZuz0Y5Mq5sHQ";
+ sout << "Kb1vAx9sRwpFnpO5o+RKYaGh9QkdrPXqUlcyXkSrXeNVfGGyET2vZB4VtQEAlG0nF2dkiF2uxKbc";
+ sout << "6swYBEYWLaftOzle9xXK6NrwQCrH/guAo10Ct+7uLmhK6P5MZW4kxyLi5nhOPkB/jiASFRPN1NOw";
+ sout << "KdNeaakEO7PEKPdcIhWjvw3sePbhOyHnmdJGHRB5WUvjOqD553+DFu4pgpUPHx2PV2IfEF2Wtfvk";
+ sout << "1J4pPO9DvXw5kBSFmoR/VdU1bGkVfkDMafaHt2diRtBZwbJb6XgkzJsuSiEtzaZ3BLaieOYJKCFf";
+ sout << "LK0/Jj4SEEDYj9NIHq5MsWH9PyVYvV3+IpJa/AB/WHXF6XAMOsLR7ulEmViNjv3UYOiWYEyT2v2o";
+ sout << "hYYg+6/lbwOg3frwE4tHu8IMfIZZzoFHbfT6MnP9ajoZWuSbbIj5KwvJ27wLUsQPevjbTypUXIEv";
+ sout << "W5WLBcBCegLZyIdElFV+1kJmSF5dZhY9RJeDOs2/CpgRctMX8hUGtMjLIAMjm2yzS6FgTtuuPJdr";
+ sout << "ErWoYhOWgGSZEitvtLZn6tGl+JZghT2F7dfmMaJH8y86txR7YCaPQgThH1zCh+yE5hvZPXW7+OCj";
+ sout << "D/MUIXp648v2+9ItiAlAZ+ho+JzhIA1jgC3h5fWWmEc7RnWvtyiatDFAGo9Z4UCz1qtRqoKdlhr9";
+ sout << "loA5N8Q/E1vPO/G3X++gNdEU6/ykRYyAODNhxYTnkiT47qRUzqSgOwsi6ePyQVuLDbSCwcpZK0ps";
+ sout << "qR803wEQBh1/nA3ZUeO9g7ZGaybxNoTj6Qs0u5vRNudI6OkjlUMMTdXVWf5ZyV338dihwHho3RQT";
+ sout << "Y6nYvFbud7IP9HMA5ga98s8z1ImzGbKGTMqy15rL5/PVkyddf7MArGuMSKWR4F0njVU40ILJh4Ns";
+ sout << "xkuzzPGZHb68JdMit3OazsaLvkZ6kBlq3KwWH6BISfiFv8eLYRxCKSfGWoqcw1qlUaFM1MQTJ0Zf";
+ sout << "4IK+R9+6/GGQJmSEDCyK1t4jbWDr6pjoXOIsihE/hsJj+Uy0mcQ/pRQpxGc9DvL2/7rA09Lr+Hk1";
+ sout << "xmm09tLOMEtd6mNexizTXHotFeN0+M/fMRwjIUY3lZSOHFVhb3AcNRcVE8IhsOnrUu9SuZCP5cLI";
+ sout << "xpc66aGsMww2jY/TEMBmWn5IUpX3vNe26iJ8kXZYYqPKz67I1BmH1wOXBaHZVznvMHXBMF7WhwhN";
+ sout << "gq7Pf38r16eou+L80O0aAaHE840lzt+jynl/vNb5tZ6F75G6jIPW+ARUy002bD+n/k1lnmwZ1WgP";
+ sout << "WnxwFloLan9gTO8dZG6uPyDHL8HaXU7cpty67AFbDMnYnajqpixUyDYCZeeYDvcCv+lqDm3YgKPx";
+ sout << "XQPEQQP+qpwvPcwLIebl3F15EhdJuy1gdwt3Dyca13/+aQOq4Rd4AYFl9U8ehlJsrUOus7Q6xohm";
+ sout << "J6wNJJzJpnRwNH+qekXnqKqRdTUwcQ8JwcN9u4sUE4pmBWS4/hJSYHpFVaUbMPmun0Dou54UPzod";
+ sout << "HFGXXZd76cZ+1MqaoyofxHU95d3rh/47EjPtQInAKAXmQlud1wYLjOhdDwGyvhPN3XfohEPrH+s0";
+ sout << "n/tMDShXPyFIaS+WLQZbaid5LuarlwA1rQc5cJ16xtSrW9sRjpEM22MxGmCyyuR5mMhbYLlULR1R";
+ sout << "b9UJ6/BbJ9iqO2LOh4geqx7Pl+kvdiOwAfXLP3mFZ8PHTHk9mJMa1TlA/fLSo8O0YqHMTnFe7OtW";
+ sout << "qMNzs0Pz4ucJ5uGzptFUXyCqq4vmSS3Cx4eolZv36mgGa7Ll2TJDCJckb6m3lsroRCTs6zF4B2yN";
+ sout << "VfTekTPXttFqhi4GI85Y+ZBvMoOIAsvysWCsV31AzKg/qlb2+dOumyMScT0GX5+l3OMO0iqG1QXG";
+ sout << "3fJZOpDQHq07WuCisGH2DOO1sEE43+d8fZHb1l7PQ2w25w6hq0IA8u73oDWRyTr5IqY+oDM5d2K4";
+ sout << "Ysuzkl+odreYxlUvxCjwgG1UkXw31DQ50xIwpChB8rEd/5gW10lnXK8XTbVgL3fPuy0cd74toIhY";
+ sout << "JFg1jyA1ZST25w82oJBYq8f0NQtHc7bK01cS4OM0hmOGd7PDKiRGiIrsLcjgfw/J/jx3mBuEaR5J";
+ sout << "X0NRxcrJZN0RdwhmwfRIybRGdSX46JVUCrf15yD6vFi08cwkMxqpdn6fDfh0BTXzhmdJU0+219/a";
+ sout << "JFJBFD8iktFAl629X8denKKlc2sMyBT0IODc2OYlS1+xn1t13uEeLar2l/EgG15/eeBsELvM+M61";
+ sout << "7uSI7t1szDmeZmWrO5yVcOBq1HFoEmHIlDJNtYQ7sOqRiaJehiRNhnj/9umPDOWCmBdnIXYAxWMB";
+ sout << "TJXpWzOOeRvpFNXUHN8MDttwWjx7Osmal/5ZqIbX66+jdQrLih+L4//J8oLyCBjawxIj+4BVLSpI";
+ sout << "/jyMUEbzBmRM3HiOoqqgiTFlLeRAM666O6oGhNmrFC/Zf32boZ6YmU0OicO1P1zsaRLJkvMbCYaU";
+ sout << "Dmi4reAoKBG1PBqaWEhDE2VmjiPPCs2WqZfll85wyGQDgKKdyboLm26UWwW44oM698E6stYbg8cv";
+ sout << "P8eyOR8awT6WwPgOS3fJZFuKTgN5LXJxk2BVCzx1F3gnmMQ5FHhIVm1srDDFIIwSkuP9mgjN9lEI";
+ sout << "SGCLsFYdEhVedNZuOWMYCvV4RChI5XCPh0ZRilVbCP4kkj0Xaqq3SbNWD90lwwJvFdIkZOEPeYc3";
+ sout << "xfLARYjHqHbNkYZLpnTw6BPH8L4BCDl4v7ndyl5Ef5zvShk7L2VIyKUK+3lboiVjwDoI+k6A7iNC";
+ sout << "UW5f3fTA9D/s1hxS/LxaFU+9Z/bOMjiRaPLQ/ioOkwyrUJpsy8/K+T78Vv5XXqtOct0aXrtUydgP";
+ sout << "uWZKhAFxJe5467wlC1aja4hHwyEeQmn0P8ilBq2G2hDYGEnKEF3XN7bwyFS9RGFdU06jkKlop8cm";
+ sout << "s9bVHFI4NeItHNO48LVlRFQaVdq1p5vD5elrH1h+J0s0BzDauJaraINeTlF/PrpGzdxiA/vq7sB2";
+ sout << "DHPBfrxe/yTTu3YTnSzMYR5mvPdjKh9mH6PmzTYyu/lUN780SUyW7mmEqoNT3LcgPg6QGUfg7hqn";
+ sout << "HP7ml/WtBRhZh1eLok4o0lVOhX5S5vMkXFa7K2pObSkGyyItCW+Kk9SkH2BboW7aYomw/3tAd2ce";
+ sout << "FUm8H6Q9NP8KDpaebg/67KZijw5kCSH9dBi/XzfZY8vzWGPW6Uj6zpbcDfRQDb1jwrPKZRhQHHiT";
+ sout << "dU6ZscvJMqSdHXsCNByBtKrUX37K8C59AaEmCdshNlaIOQLvceIIpJ7XTfmtnjSSvK0zJzBWt+xf";
+ sout << "RdpAeZwDNK66MCl0DIJwG0aRK0nwuePf0f2k51xlsrnl5nY5rM+hfblfsm2BKH6BH/SYlUSLxQlC";
+ sout << "MhdXA3MGhxN9yuHCm0m8WVc4/4g+SCKnp70ZghQORzaijlU/0KS6P3e1po+y554gEqicyXXndnwA";
+ sout << "qmRp58+1pxbwORmCQIdgriAZxPQl77hXxCMES9WQkBVb0RKNnbWCWvVdkwxxYHBGhO+m4kZVjFwW";
+ sout << "0jxlEcKNa7WdQStRXCap2VDFA0c5kUS7WN6R9HbM/u8xMNLzzE6jrH8l2h8QrLkjHE2R5y/NIVAX";
+ sout << "G2+cQp2X9XxHVWQmfflpXPKcDGGYR70ALFrgeRkQQAxfR4WBDLLQitSat8H/q+yyl6hdGj5nmYXW";
+ sout << "BRy7URAh0ibk4K/Bs4d2mN8ivkxahOB1wPZS2FFHP/I9C7nikHmQwlg4VlPJTwLp25JDsOypt2sH";
+ sout << "J8CK02T35fcv89Dg6GKoIp9RFkJ2RcSUXyWP3JWFxH/u1rdTmOlVP7mAW4MgT8AQdXWXkfaJxq4S";
+ sout << "n0ZRo9RT68RvAcrdv2z6Hwh4klmlRw9qFtokZMGLk8BOtPGSOU/Av3k0RjEsqHLiQaRo0JpnBCw7";
+ sout << "JcdRwIuqvfWRJMpGF6DhHkIQCFZcTSftnNDKUBLVBn41O/VnUCakShsRIy0a3hjQpg0M/7hFbZI1";
+ sout << "wqiEu/aR4jZE3wixAJhXFC86bb59UwlH6TZ1Eszn7eH0inEqCSlMMRKTo5Bz5kybjP8As1pkPS4Z";
+ sout << "/A8L6SnY/HIm6jy1MiZJmzdimBLwv14JNZ2oUM/TWMzgnFayE0C1dwiX+75KOrJqLl5hnM/5r+Rz";
+ sout << "dB3emOb6i0OQI444QHxHLK6ZkLlJr8kY5opZlAfjyzuWzJUG9p1CXDfL4t99GOoq8TebYYdjmbyV";
+ sout << "BLn51c8VBQ63UM/dzKKgL6n5aUWAenJ9sNV3/b+fmN4cPWAv6TZQTDt6XZxPz+GT74rFhMv5YcwV";
+ sout << "zkuiYPllmx7WE0pegs+TV7BpKef2DBNlo6D/7ELNsr7L3+rz6w3l+m95i8jtIpeoPRzNxMWfvFZw";
+ sout << "OP46naSo+j+8Euy3CnMnx5HQVwBpEeGbqjwyhsKmRyev5wzgYGe4mxNTgrTKsVh4exFOYkFJovWm";
+ sout << "l9VKTRY4/Zd0H4dodHVquSWT/i6ogNNwB1O4T0I8hs8T8i4XybD4hM//dNvbPxq82HobB1R77rqz";
+ sout << "7wGi/X9MmX3ct7FIpU/sYUT2DeHZ8QFMfiBhSLqoHcJj2RgNr4cX9ZbFXk2nzw4Dz/ggTDOx8XRO";
+ sout << "qLovxOk8Sw7KXBDVmW1/kBhLVN/yffDJjzwwk/JqiUI3kPaLOiiJnnLRH+fS52mC/+M/xg7JHde/";
+ sout << "oGmlCCGQb/1q2j7he+nBCFZM8F6BVHnlA7DoGCzSetTLkOLo4OZssfDw3OSU/2vxTxkZ9qsarMCF";
+ sout << "uIl0MrtO59MbXtiMtGS5gcZL7Tu0SMDJUQYFtoZ6+QnDgHD3P3BsqyXfjRR+DTGvcVSyh878HpJV";
+ sout << "3R786KM3ZwW8iVxFabz1OOFx0Ox1fCspZa3L1ckftSTD6ypZAN4f5ox3Ad2iY3l2J7iPeE6/a9sk";
+ sout << "MnHQmuMy7pMum4VVepnd3lfqJljkCG5ptnznCTmp2wPgkqmvzXl1UU7kyYrdMt1jTzg6fiKsbROp";
+ sout << "wGSxXLbRGT2sFjU0KExb/rjG21a0NVVp2PnYMQuyF6glGmCeSqohLsV3nv7eCbUd42IacVWAR0qN";
+ sout << "c+5Dj8hdAmRMU5DJrbaLpuHeg0gouaBYrmCYGrcMqcI77VLyeQGF5hzq6t6IhiPk9uLrW2tpF5Df";
+ sout << "gHr/I94WMSy4Wf0RnACjVxSeRewTXJDuA5pKq0EqNs2RzmiB7LoktPBzqBgMjCRyKlCAzwZ63XOt";
+ sout << "vOvKGWCbjXcVEOZGAY3s4lQRiNIJ1Wat62u0MSj9BERVU8kd/NWueMn+tbdGTSR6Hgyk9bvHF+Q8";
+ sout << "CdFQntiNHHU2Qx5ZbzPPoNNjqwy+FGicBBG8/xV2bPd/JWm3uo/8BFPqzORzGAnG/BHSARPbEvkJ";
+ sout << "ix1Nw+gepwJ/M8YNp2aE+hYTBUBJueaSPyPR5FS6ocR5oLd5j6biPlTv9gHGg03Wc4W3BqrC4/He";
+ sout << "R5sSsRThAGj/wbDu6SMIoT+2gUNTbUZ7Lx+8rVcWGY4OLaySvre1BzyhGPpNDuIhmbW7n2jfEO9C";
+ sout << "MYbtPL7skgqV4L9JlLup+vbCNC5ccOpxm0Y+XSA/Gidd+aZu9D4R9Ddz66ns1Ida6bWLFM7T8BpE";
+ sout << "dYgcQBJ63ixc2nYknR7w5Ez6JPnmfUSDjXTrY+tnXlbyWIrF1fI1Og7V5i0d3G2jcuitqs0v6SwR";
+ sout << "rgEDVnVz0O+cd56TOssSv1d2KPIY6tv22oH9P1sNzjBYN9np1jRH7e1GpnoEvY7MA2gNyBIWdrKA";
+ sout << "uUs0c5gYr88/DbJvZtSnmPUMRr2Lp/RtOHnabXkbAevAovdIvmh/BfnXnSjrOkH+BH3Ici47ohO2";
+ sout << "Da/7OlZj4wAESZO3WO0SKN4fUwC6M6LUM2L7kg1tvqLA5KtYokFOc2dMSRs8qz6qpneGUv/f8NMu";
+ sout << "PphJtFjOixUrhCOZVv+0dN94L3dNPRyjXlDp4EejgUxWdeYVPxiyXor+LSIP/s4yPNmn6wIYWF4l";
+ sout << "hIFjh3drkdxwsaS6QWeYL3knA6I2rR0NERayOsVKxcVftkonwoZJHpdRkhDhBj0Sa8TicGcSgQt5";
+ sout << "kqnjLvOqNA3A4XkXj445r/HiNWVbd/v3DRYn0lh09HU7ihGo/qrC1DDjIQjRm5MO64mM9xZY58yU";
+ sout << "2fHWbglRconR2QeXmQ1D71aEvNZwBs1G6s1VqbCNgpeA+7kvjlzwFzyvfIqMuLTlvUo+4uCHf03O";
+ sout << "im1AxMxq1zvmRPM844aIenx1bhx4vKmeEwTeHKYvjUkTCMFqOXxq0UeK5XSKJkXrK0F98MORKeEI";
+ sout << "lhpQXL7ky8QZChU2k8DDBaocWzF9oRtrXDY6vfuvcituo7gbfVIDzQrp4ran2eY/pvucYyn/Maai";
+ sout << "W5IQzzordTdlVKX99bpoGJrmFzYrrpUa7xxb5Hs+YvwicR8IE/2ePoZ0t04dBaIwPP3ZuPO0G3EA";
+ sout << "2XGdG7f9AARr1098ES4qU/JRanR/5XZY16deNuVPRKMdUEBMKijfHoQCCorvA2HoQz/jD1nhg2ZR";
+ sout << "cB19h0shhjnz/OVgb60bN4izBugSDZfyiuZvvdpnCbc7bReVBCM8456EG4mHdI1NSdGJfATpvYbT";
+ sout << "lKBwrESSbmkYIVCcBxn845i5lrPHMdlt0s0qwtcqBTvjBmhRpezoF/U1BnPdETs303TMgApql/xS";
+ sout << "MWS/Jme3ioI6E35BzkTXABoxIxyjdFkFTququq686oqyGCSpCxlJA2OyXwUc7XkBI4Ki35ewV3Yh";
+ sout << "DbONeeMe9NAred9EhUBg+uLO3W8oI3094ZbMa7N9b0tVpewsJuqP7nfW4hZtLSpFEtjlQVGsqfQv";
+ sout << "TqljECWVoZ4cbclYUXTMqtd/5okENdDEk4BJnmd5b091PoGNeSqYPf94LVrlyVSM6G9DmQm1UTFr";
+ sout << "bUipf0LcSMZ+DKYhwIJLpP/GppEMORYKRhDtgPy66GNfhiNKzLP+fyPpSf6LbtCUbE3Ni+PJ8zux";
+ sout << "k810i0by9lVlFUW9tNnUcSE7IPrcFs8fozpLwNSI/gTKFy0mNEnHJ4hlSgaVB157PLKmHSv6WJIe";
+ sout << "6LE89xQbc/Rl2TRjh2SxZXB62FoRLXApUFcSdCl4ZRdfA5p5zXcQVWxl3RUdJbpUhsqNA7vmHT9q";
+ sout << "qinoGtKuPZvANZr9Yn5hB8zzHLiRvk9NHhFdR/Ibw2gGXq6XMuJmrShSFRMZrTiIVtQeBWZNzQBL";
+ sout << "jViE75H59XWkpeUM8hAUUawEH+CFcYynOsbTECPSbgIbt1w6DRB096vYoMnxnttX3J7SIf4JtpEZ";
+ sout << "iSI/Xzdcu9L3QX7bQaHzaf/FIqWvdl3quN2CrSnBd/UbGCdgylgCQyBtQjcr2lP9O3QKav9laEz2";
+ sout << "YJbjjXldgdumq2W/0rTopOIX3V3xCHB6xkZq1SS773LCHWAaoPRMwgfrJplzRSNIaD9fRsVQuS79";
+ sout << "TUwphVg4iGrl/0u8YLEr9TmSTojfTS3M23HNJU/K8l1zOB1gfCYwmj6jGxTCuvDHvgpJ/UnViT2j";
+ sout << "fnQjGXg/WUwEoaD+66ZePipeu+PlIHDSX3Jj2+jHAmMMsFX7PZlP3xhWUyG6U0hJ1guDe4HOcMWQ";
+ sout << "97gJDdYzwSE+S7vB1LQWKNdEnZPmZ+rQ79gpjNYVfzE2djCO9Sn0rQD5W6O7Wd/QCV+qu619i1wN";
+ sout << "IziA40bBSmDLiEtZv9Aviu7q7Au24X7ZcXxRqruz9zKPSFd6dhpWAK9lD8PCNwUrDXAvlHT8pS+U";
+ sout << "8K2ISz2knDjOiHIb1P4jF374K89RNSNvefvkGBNrJ8H7JiseiuhumYfAiSVqBQwGUeh8lnPq4aeQ";
+ sout << "V0gFbCj8kNEs8/cDLBnnwCGZbqRfSGgSXTKWu3/oSmn/bMralWdW35AgMM2tJ3Fo5fIkfzsq4ykT";
+ sout << "ywlPUtOOHIS8ImXHCQgVt1+/OPbTpgCly1i3ssNeIkjPY5DyGk08WWYiih7IAKxjFeHSavuYKy1P";
+ sout << "1QMI0dJ3y2Wp717Zg46c+iKxlg+oUR7It9UT+ePKJCm/UAayU7LyOWThMfP73hM9k5G/h+8oENw2";
+ sout << "D/Kj5IkwB3iK6iC/6QrZaIHTWVRNYioSzsEndB0X9NhBMlcTmyvPJfctFydhK5JjQwjjx96k4FvS";
+ sout << "P3C7zh1ww1K/x1J5vK/2NViZxgSFmIz7L3NbwUb87eXPictC8NVReqskmSw8hxNmgTHG/WEU6NZi";
+ sout << "XRI+HIV5erFIeNRIixP7gp1z/o/5bL1QH5ALgBk4TRc1B9qG+fcWfJg7o0NckPbkkueM3u1J79mu";
+ sout << "pcs18OQ2A8zehkFZeaTCC6Q6BRijJFBpPNnDIxJ7RDazpi3i366Hds6mD/r8IDRT8TbWqedF8bNv";
+ sout << "vGRPs1aEWykMumZdtkRZXfByK14T0gF0CSjUeIRnPuhXbfMBznBUGue7eCj3jHnLynEKdtuA7CyU";
+ sout << "FlXmJWrixsWkohwNSY+2jCNgmIWsUsvKS+dlNktC7OdSo+QEIDnCfPio5AwCTWBBvUaxwR90GeWW";
+ sout << "yk3Rob5grT0wgxQGoCoew1J9DJT6iLEuy/jJ4FjFSB581f5186b4lOYaMoSCpOp+uWEz+I0c2G0o";
+ sout << "DZKFloHMTpZyEt8LggzchgxypR0SGSFxm1X0/lhdPwa+8lKRdcXmhe2ai6H0R3lhR9ZTBaCWhMMo";
+ sout << "GJ6SvT1ttMF3rg9qfFc1RKjK4+TsRxwYOP1XDz1rjEp7KWHLxZ/JKzp4IXomPm2f6exs+miIPME/";
+ sout << "yo+Dtmev/tYJLd9poc+LAVkFIDqQ5ID08LgCGwAwLz7J+nLxSww2OjOxhbVfRIm0Bj0ERLmvy0jv";
+ sout << "9Oz6hHcYVHPqSGN2aMsPtpWUWOgND2ofYfAJqqUMbwalMYUyaimuIU9ODyoT0xIMH3VeN/nrIHDB";
+ sout << "u1J1iY/Lshk4okLUB8OnbtJY8t3KATibzVtoLaoOcmcQzOJahRLw861zYSlCs7SQpAqVEoRjYtZC";
+ sout << "5JPGKLY9zqoogM41u3uy16DfVEvDYr7XZm9PRYY7H33QZiOYhPRgzZvs9nJZAJ7QGRDYiNQG5V6a";
+ sout << "Zc/qLzF9UaRbUOxVKJ6FkdhuIuK8d38cN1ky2uUM2m2cFAyJmzKosMn1+ZKo85F097lSyuDqgXvF";
+ sout << "hD9fu2ze0hTo2zqFmHHoDvA/3dN3Ilt5omMSSf/HQdvOeXj/2vtmJhpnERGJo0NCiOZfKI5QG4qP";
+ sout << "313wIpwQPZxAvJzh9YEHdb3fv6GpwlEygu93G8Uu4WZkT7P+XXqhH5PL3cZ2MYQeYGR70nvUzJWM";
+ sout << "lz7g/DvPE2aQHxGSlH+W6JEabNL1YUvAim6/uLWXvjbU+QdKDUQsml0P/auI6oHYMAgXnifheH85";
+ sout << "xyT4TLqA33Bc0SRpwPV6lIigxQg8OEX1czyeYInQ7Y+YhaPF9Dh+YR2Kf6xLS/qnhOJrAr102gf1";
+ sout << "apYuPnSogxRcir2pFtltl7k7xjaAcfcOUfM10VkYOoq92X9XTfbEBbeEAhFtw5AgWJw/iryapVb0";
+ sout << "nNPvXwZ6g8e0LM8cky8/CR+68D7KCkxTQ+C3cq6Fco2niuPO1/IbF4nnpLB7vDS7qjrgqU4/t0bq";
+ sout << "qMPMFgX4MhBikxwzWh6xnQgylA47Rbzy0EKMD1j4NLSkySg7zpm29sTTvEh68OnSBaaiNm7yoIpv";
+ sout << "41yj7kewoFyIY6wqbc6bxfAc+cKLdxYIxwwXRknJQuZW2mx4wpsjXYqEwZ6OCDldMgYkQWuLd1r0";
+ sout << "LNY80XRGlKZw4bV3eBiEAo8E7yU6VBqi/C+fTWCdaD3kjC1mu7V9XX2tMg4G2kaSqc6JMjOZ9VF1";
+ sout << "06FJujVmJxzZ8t4Z8yI3JGUpmWgYnvY3baanLpy54Fd7M2JgoXJrElrfsEMFsci44CmK54NpntkB";
+ sout << "wFoMnSxymuW02SsBU3DEBbZJmXspP2yqCUVHyqpp2Wez21u9T2LV2yNLpKHm8uNRf++WJ49zGBDY";
+ sout << "vJ2xirDEOgRWCQ/z4J7fg6AwMrG0PhqX5Rw1NStM8rPEWqKd9+6KKTL3lMj0rvvVjUCylKvev2Vw";
+ sout << "aWeVNhBWgelJ9v/Kdp8lQRsSyqJ7JP9CQ9iRicBTKpKio0NTaYn8LEo0yH5B8HVycOVJmPjjYCg6";
+ sout << "eiZ6Vxr0laxchTlNPwRAU9hsZky0eO68C/cLrfgqB2IxJeZH0z1XWmsA5dGcYAWMDamqBNPTm1jW";
+ sout << "USE/0iR1KPxvIemOxt2V565of+jareTk/Q77E+jkopN7K2AmKOGouPeRUGEmfrtZQY2sarl0tF2r";
+ sout << "f3TvsoqK47umT9s3R208qwO7nC1TQjaEdP2KIfcPxbyuZ38d+GxQcyuID5+qS3IQR4X1k+lyUxY9";
+ sout << "wtfNYYbmsXnqAXnM/mKlfGlYalVxalvxo9eJdugBIVbaQ3JmEovseS8IV0dZz9ItdrW3HtHquix1";
+ sout << "xKf4VBhFqpCdVXBY0/mMlcWws6nItmS20yCf8i6jJ1HcaHGuPQvhyiv+xL8kELcu2avfpf93i4m5";
+ sout << "KaJb4xkjSXNAcBerhBlZ+DO58TBlEZSI8UTw1+yuzymxOyNbq2o5rwqAqMDxmfJlJvbum5OnTWCc";
+ sout << "2AgSoTUVOUmg1br3xDxH5xX81Ty+fMaizY/8P+dFg6MGikZZuUky2R/nJRilgi4thJV2q9iCm3sX";
+ sout << "KVW0vt0AHRnUb+QkaLl4ZJwGUcx37S2YcsFCAk/GG9F3g1zo3zUNgbl7WelqICfme9vwcl+LDnvK";
+ sout << "q8jgEUNwGe4ZcgDuPAxiOmIo0W0iRDbt97R56EnRs9n0E4W+/DWBvHY09nBYtVrQWpodvZ3ZkEp7";
+ sout << "9zg0d0rwaLtb21iZPH1swSnf3hTYC2sxq8ppPJKENrTOYj6qiaye/kqg/u+rZmhd/LwNckkjFy4H";
+ sout << "9OA+Te/sjSoRCoSSGXtGSexAhh9WfFMnivoJ+9kAIGRUCJPQCx+NPQ5s1+d5sMZHyGs6CsR2Hpjf";
+ sout << "ZvXMpbZVEV/4Fgf7VMtSexPTV27hFxD0wHrhU4lsQp0T3QcI20VYx9kTz025uiMWUD29Fm3b7peD";
+ sout << "K2bXo7y3G3k4ykOPrF4kR0VSkjB1NbhXftQImcpZiSDce0MZOZZvQ3lH0pZ7LaU5d++Dyp8xsxLt";
+ sout << "4toEQgoyLaS22tEe1jSQsT7v4AWwjMBAQngr8MsqmcWyu1UYshp3kmosoxHZ5N0w+RN6P8dnRWuR";
+ sout << "N3gIp6dBvllTNVG1Nf1zJ1r0w8KITbzsS4bBLijKlUilL4b/P8N81fEUDaXCrcdcL9b0fzv64vg4";
+ sout << "9ZXsKIfw3rjw+/ZiDt6a4WtwCZvCyI7cRRB2YGxaeynx/Yyqq9xwnWThHA0a/SAwkokg/zbxfi3u";
+ sout << "kzPOBau+RpDKYDL6DBkYaxMeTnbF9dRZnckTEhu8kcSh5uHhVNSeyYSngJ0YXZV+tEzNi8yI0wA+";
+ sout << "6M9OX5gcHv1pbem3Fyx/3Rqu/LbjD19TtdwuyhXnnGcCXPGzqFrpEdN6504IdM7WDRrkdjpyTlOR";
+ sout << "5hzk1GymO/IPAk8uRPEHeU+873xDLoxeuNseeNDRuNwFa5FcIKmIAEGeSydVPv2uPH6l0VQezasr";
+ sout << "XMXlWOCOFo9eGGJW0p4tz4qX35tClsU+0ml1uaRR/fFeJyJ7+WrqyqDdHpTqnGWp9OdyyCiowxFD";
+ sout << "T7MJ/Py4+ytZ++sArWxfC5KATdztRSzuquLbzKfLtF5x3vihfHzonsdHzZNUTi6py1JV0uQO6by/";
+ sout << "1HjNlxiNqfOEaQxfLe/KwZvvvSVasZQz+64BSVLPlmwFnha6iUXYl0m8pNQ/E3QlysaeDvf6j6nC";
+ sout << "By2Ft84JgN/2Gy/XJuQRobhPiDh2Fro4ebBwOjIIfgQx9Q9FNzKRDHAS0/HXB+P41/CzXWr2eTu2";
+ sout << "PXZ0O8TshRiD5gH7Su23qpwB7qlNrPbULE06oc8ftAJtuKAGDPFXbom3zFVjsj/yeTmS2HvU/b80";
+ sout << "8FCgSSxx8cvIqfPHLLOSPBUxJAVyCFPvDIUzHE3sBPyw9OY40bgSyaYoC+hoKuf0lHgt20r1Tz5A";
+ sout << "Em0sAZfnbWRESfaM8lafrA+OjaSDQQW7wAI4+XO+HbWYxz6Zj3hu4qfmID2Iz2kuo57Ci1RLOUzl";
+ sout << "sWcTkVqUrdnoGzjtese/OnbbsCUQIfCM5ygX1KKgmlb331Er2So/dqrYpCFAqBo3SmctlCtmERuC";
+ sout << "hkB59TSoGqV6ePxidHGWKBsEHudcQyRv+pwNhnRkW+l1XmpzMmdQqeBjqcw0RAVJgDmHS1SMrpPE";
+ sout << "GZmNu+LmuN8xdGV1tbylOTC4TVND5NOWaPDE+vx0YaRi2orc7KgrLc9OhtAQdTC7hlk/8jlmYxFp";
+ sout << "xl0Fe0oW1fU0avYFk0p+dUXfYCA6HXTetcQJdo7Ai4JveZIUdZ5xW2snKTZXd/IAE1UG1o5Uz0LA";
+ sout << "/y3Q2+1hBR+4cVnmqVftEf4ZQQoJlY2CN5Wltg8kWf3nsgSmu1JNtRXEV0DKODWTAGSizMJJleG7";
+ sout << "ilV6GJU335MapNMUrlHYVNicy1NY8CX/PhZtbGzqNM3gfwS0KhWmKN6HS2BYwfPllMYJk4koLHWS";
+ sout << "R9dpGgz8n+PDrKOcLG4U8xYocSvLUVJ6YmdbYWtWunGa0I0egn3YmUc6RTflJ6G2rRpjY/B//CY/";
+ sout << "tAKUuY+4e+4/VTH/sm1z3yHSycLOorAeMjKnPpDfN5d4hyrqeHgG/PKdZ/QlMZYw0M2Kb9fpvefl";
+ sout << "cqeu2IZaH1E7G/YbqnL4wRlPq6HSQcT7C4r7vJxwYGGwZ2wClOXdvvbdjAOxZITv116e6jaMQ3vN";
+ sout << "NNd09s9zZlXyXDyBfdLgFebbKLjrxC+2eB16aURJkorv9jQydbdMMsZ7DnW75DppDplDyPvMC3TT";
+ sout << "0o4PDSd+YBeWo6CAZcWNR+lB/ZB36tA/RIyCopqeUOL/P8cfeJSX+3k1yghKm4HeY6VaWJSALiiP";
+ sout << "/0nEgGc4dbVmspZGzaORpGOTqqGnHm7TiZmOJW0PFrCVY0N0VqHcSkTOZn6PtuQKFXtFHYEtpepB";
+ sout << "0d332dQEz4AxgF0qrhrviqEZpXTxG1bWE4uC3H0tDWIJhyJe16xF96vE3QN6z4P2+esYwjsfRdQp";
+ sout << "3CYcxcjagfWBngiEfRsseTLhVf1KsFIXgafpK36gWlZan3O/7uyqhOYyg3W7aJ20bvZWuZZ94Gls";
+ sout << "7VLJyTKS4LNKHcb9Y2PtX7nzosb7tL2hcYgGChva73FTDSuf56HcC5+KM+t/MXf+BPBr4FUsgN3M";
+ sout << "1QrQiRM4dZte2DfAvSaZmWQuqtsH2uOLimHU2AAsfZav6k+7Qb4mQs6zdRPTMPcd8cUv8MWf3aOW";
+ sout << "skdmBW3ausbs+iQFW1MUqr/4lwe4DUekn8nfniBCI129E9p0JLXLw9NrT+669orbCX3UOGc5UHTg";
+ sout << "0SvFTMRqw7Y9mw4NMLOMCn8/mINMrmmaTvtEwLhHSOTwYjRCJ1A353ENYX3T551pA3QS1CrWfIrI";
+ sout << "apV92iQiSH5A/rzOjhq3BDCU8HIXihgVCsR/w5Kcy7nCkI4pvbsIjPNivZsDhx8H+IKNTUNpSRt/";
+ sout << "CPzh885kcdofM5k/nqVvvJhQzhpk18+IRuxTp5Dqa62MYZwgEgURS1lVVzWQX86wJl6/4cbIVEcb";
+ sout << "1dowX6FrycYfy81NkQdF06egVOZVYCaZdW7f1W4Xn4l7PWb6pb1HRb3/0Wu8YuYjm8pSY/wA70cJ";
+ sout << "G141hphqUxQeblVInhyk40zcaroWoWWGTkiHUnAp2cQgjZJRHbrfoJgF5UVmdDJrwdwiMREmR7zZ";
+ sout << "uSS4xTZ85fXMH1NGcOAu3tFt97bY2uh/Nzk+XucsND69TUp4s7RBmiahbRw8XQa5izZMRPgYkVss";
+ sout << "A4ggkTd9wiuotgjMtjd+NsitkGhuoQiQHLW0R0PVPEfG5mtdlp6/PlqCsTum8bRuxivsPkCcjEB5";
+ sout << "9gdgEeSfSOSRTe0hnm/gDQXs4BnFeyG/YzHm1pHHIP2VHkr3GlWVu0w7tLPXFLXF8kRESZgo1Np5";
+ sout << "WFYh5OmLS+J3xhJMfzNjUJ0bzfN5BnlQMDT8vttfAovg4IO7zFTexfLBxgKLWJZ2aQnoPeoBIJFk";
+ sout << "uPBxTjUY29133ygl2fAylnqP9zIipFbgkLEw0NrfKrfKIPK2Kf9VI1+adXQlsHo0vsI7l/uWbW5n";
+ sout << "XMP598ROkdbP5aSkpXPdld++HGzYtfD76teXubxsFcCpmm3OSbcLo2vAOMDQdpfFTOLM0ig1VhC6";
+ sout << "tCC32GUIWix0VLb2dZCdidXJXu40BNL+ASvXUAipVBD+qBL0uSHZXjns8B6XFD88zrW+reKH9ZO/";
+ sout << "gRZI+FH7eqxkZlu5ChFHJeZVSK1MwHk1ikQxAe8jVXTzzVAMcK3EVGvH/l0RKX/SJvU58t0SuHOr";
+ sout << "HVule85Z1pwxt5DWS5IYXibDgxOWfrcKp1iTXhhKsmOmqLA4M8iGbtJ59RjGp3ndoc8k5lgx0YXO";
+ sout << "CZKSNjMhF/swTPJF3fvVwzBU9vvWe2HCUTejmLi+bivqW3n+qtHiI1y9VWPY9B90N9OalQHJD2OB";
+ sout << "myHYFjs7AVdwFYoQZIBFtaKpozISrGZBa47RfYaS3zH2aNzYnT6zDsgKJYdA8skR/tNgMQPdgEtD";
+ sout << "d/NZPBXg0YnjIJNX9z3kcvXuwNnpv/ZafofCuHJQDRlWe2hvfHXZJf22IWrkeFjTGAzVl7bGry9d";
+ sout << "zaaFJKt73ogJ4IM36xHJ4pgGuP+daAnif+2sdHoEavTV6zvkV8T2UXgdP5LmPVnoz0zue+DBTt9q";
+ sout << "Lco7T8RhIgRDQL3K/PB23bc/gYdGKh2ISHY1klB2pUWu30sBa+g1ap7cTFIC5eDQ0L19jaJNlLWf";
+ sout << "MQR0Tshn9dRsznAfmjIm4RvhMvf97Wer3t9CjFOOaQfSXxdPQ0KpnxeyfyuBpRYScUX2cCEkIXjA";
+ sout << "IiOHn7aTJ22O3S7vZ2a4jVygPosfhvd/nNfWhAukM9UfVIFi/kG8CXvuPjKhn77k1ddfIbsp7Q2L";
+ sout << "Na7xoC6HiPus1PI5kd/p1NkclRSCiMBCl2qQfHqVIioK25jAjHl8SxsiPpkGDgiBJTO+HR39syAt";
+ sout << "iyjvIv5KH/yAzl+w13S68pDbAGZ7U8hDI/NKlhgVycVmkfKZzGB/9wzJVemHhvosb5tY/P9ZHqgB";
+ sout << "HuIUgdxusvm5IyzBBl0DbaMxwdHevHQgJ7NmxtvGDDofkHQ8JQXkr+N7+Oswa8KAEdJc2t9fST82";
+ sout << "X2NPa6s0wEQ9PvZhUQd45UcFtXCBbkCoisIEA4X5Af1xG7gqwo/PqEW3g0hYwmbWDgNTnEHaHSsI";
+ sout << "4k18Jcem/+3cNZx6zFKEZ6u6/JlvGJsNjvoZQlRluwF64lvxtmvxwD7ak+UmFc9Ll+cYhiHhdy7h";
+ sout << "e2RAF5HLTVf4BZkUs+NI4h4c5z+pU4dxM4JOVNQ5OoBisyMeFbFDlBd2x6wRMtiREIF4h/yDm5/g";
+ sout << "QIQN6sXCt4zrBLoPvFBvC7UvVPaLEpW4Ay7yeYoFhTUcZ7G11HdyJHIrWhXj8mazBuT33wiXPXD0";
+ sout << "fgDO2G5kVrYv837OEagI68UWoYwHFVQyKy7rXAqnJOH6rUOiSeXwrAs0VXCOo/iWz0+H7Vgkjvo3";
+ sout << "a+RGXFU0PakfqhrS4elT76gTx27rIG2VuKjWnNWi91g2MnD9dwS4xGNu0nB/HiOTDKKfzQ3uoE/D";
+ sout << "oP/Fn1wFPtBDC6935HgjJA30RBpTT4lZtqaVhy4F/pFfqFBL/RUU0jmlG8WvHSF28NkXMJFxqJqr";
+ sout << "FdZj/ImUTWIBtAOXIMgu8OQ9LXpd6kyh7dpy2lLG5ik8X18yJnJS6WZnRAx6nX3Egg7zi+jq6P+B";
+ sout << "AGQecbLJ+ngHGqQd6fP8hOU7ujU4yyAEKX3CWnvjK3DKN+fH9enZ3VHAgBS6uy4BicQeqNJx0D2k";
+ sout << "ab3+B1WPW98ieaG+G6TjyFGNFwqA7ComKR7X745Px3MW0qoQXJVK6FkV1x5zOjrI09hc3G/NqzC8";
+ sout << "5hR5EA4ebicmVwYgc2rftfhZ9FY8vTslWklZHaZosv3/QsoFuoDNMA82bg0/tDUo7G47EAETe7lj";
+ sout << "7MRS+dqjw9rMDlry6erLVu9vOvz5gVxhNM2+Q2VKx2f2ugkn8/VGq3ufEEoEkKtnLI0aJuli0ZFM";
+ sout << "yEDEHqombS/JdKacUNhM/zevqMA+F63pABb/wTCYhNIZWZM5SRCoYiduOSQYs7eR4ex+Y9a2T2cc";
+ sout << "UyRYpcFRySR4XB1Nf5GVdBmuuVqvg7wuRwCKXDkxcesKSrg7HLlUT9xaIReIHoFsZj5w3RcZvSCT";
+ sout << "q+d61nOKgP5ym2c5BwxytP42yQ3SisRyykroBgipHm7QmrJnrsw0OZvAkFh/bo/mhstLDPKkoA89";
+ sout << "Dg/kr5VTRG7Id11Qb6CKvSb9hq0mtnGAGnYa7XTmmojYhVXeFF1aj7E94bUVjtl1rJHlpNSBRl8U";
+ sout << "LffnXik6Ju9PK+gw/WXTgcyOM0Gmw91IqJsBiNVmLMB8VPpunL+J27aoPVdSxIpR1H5PjWhWyqNz";
+ sout << "jDTlxcIKpM0PBs7PyNJ3a0QSQYQrGDYI9VkTQJpxwYhPOAWdIjTKDBfx/wl5UyxbcAzD2e4mXkGq";
+ sout << "535EjhmfhG0SRvXP4heNN4Vzl7KMDjO9VNMB4/L7500528rL2b1wC81igCwNWpUL7Pn8/XTK8jRz";
+ sout << "KCcb92liptbbjGVb1V9hh6o2aR5l6ZQtS2MHlkZG5rucr+NT/SFYoVKB93lLnq2CbGPlzZZdPHLv";
+ sout << "+RG2+80boVipNp8XD53hLWtOvWMQGWYwVWSqQ6jrO24yXdRjZCn+F7mn9mP/8U2dEi546WLWvaOi";
+ sout << "RzQJdz+5IcL8RBPWCcUqR2VSyPIT5Fzw4D2+W6G3cvc7mWgiEjT9Vzjpeo5D5rWM3pAPl/kI+gMg";
+ sout << "42h/H45O4HY7CTb1Isa5GyX4L9vPfLXyfqU0N/8v8/UvQyB8TyKaDMedoiA7+hdWtXRMJ3cFxepR";
+ sout << "WqHTCK3o0Lhh29qPjMh7hKtC4xNVL5/Cz9KmIu8ZYAC4aWXX0g7wzBb9o/Xb6mJ+ZEDmtI25DiFy";
+ sout << "McNqoVFxoxYhTrUYNOaAqJYVTOauV8N3Mv9WiR+s00mjLAIEOrcExjt6WrCUVAI6XCqsWPibNFTM";
+ sout << "tSBPtg/w6jDsX9H13vPzwv2Wt1gGzejC2nuqaEVXlr1UKExiXjQGAJyiw13PjRHSOD5zUWOhkPBr";
+ sout << "h7lzK/HcCTA6S8PzM3yIPWYUtlzfFT1a1gIoBvO6dx3EbGGRYcePQ+xCln/07SFsUXfbKu43/RIV";
+ sout << "LUXeP19XnLIFDk9NRlK577rEfl3rZN2TkeFXMESPA2E3WtUXLw/rQxAi+E9n4pHJ1dfLJDrXcuH4";
+ sout << "iY3QbOnJj/ay/dY0eDCjoR8XBlbvqDCvuKDZd/SPNLseIwXXTSFxnRBONagPDgQTwM6s0bqn99DJ";
+ sout << "CEVWRIrkCH9o44FsWgF/gZEcEwFbZifnkjRSbiYPRn+euaJrvU8t+x86gbdywBJg/NdAPdn7gmyR";
+ sout << "V0mxYX035zo6xzjvYlOVP8cDewPK76P5sFFM/00tE134eK0jCmw2uMJ+AX2mEm3RhtmUdShzk8Y1";
+ sout << "lVbWyEn8VGdiZmsaB9SbeWAX4vj763YnH8pogS5AfRtR7+Z/B8dB7wBowpfs0Kvtm6LzPAqpyUYI";
+ sout << "OqvwEK1/f+fwq70lm3K0jFwda0ycYlBWhmGhpuFWYh60ictb+b19JLmdT59NrIme9aHqB8+SrEnN";
+ sout << "C5eh/Ee6eAU7fcsai415NdkTphEqSj/0IJNNqpoAm6WYXyNcFeXDma7VCa/6mXZFTHi18+qWqdYa";
+ sout << "FjFmYunmwGz5eEXqWwZrN0RQlZLzGTjAkNuVnrDhUqfPtTQ3Jg9Q/g514VBc/e1f4tcmY4lBDeGZ";
+ sout << "yAfWnvwG76t3EfpMmnfgTyHXf+cf2g6OUsha7eTpLNh6DUT6TBPtMElNMOWMDlCRrNUAvUGaiDCk";
+ sout << "dkZ0LcNkPZmrS9YTvzVVXU/DfKQk2Lc4kJw08UoAFRTXRPA3reX47HbrXeo7bU4PAhA6Flkzays6";
+ sout << "NWx7RVOmdSvo0Jc0Ec2rzSfShigqkmjXo/CSxvmuemnMZaBJb6o+R3iVOdMpCsw5+PwCtlG9ghH+";
+ sout << "egfrWDE2MA4hENDmgbONs1XrTLrEWl26avRL15OcLNfjb4Q85Db+r+8Q8ApGlnw3Sk76VrAoqY5W";
+ sout << "RIsD0jH/CD5VBtJnFNU+qFtYVNp3kBNKcNTP9RDpi9BxY0qZuEcRGtfGJMIAtqrjo9STIe7RxdfE";
+ sout << "wy30LUHKaHZSURLZgUYeY5h+OTOYDpTMjwm+xT9r2GyOOQSmNL+2QVXyrBq7BxtbTwDUOd+a/9SU";
+ sout << "rCqZfwqmA6OrPbLHkRkQHYJmtsR9k/+C3R4bSl28PQDEf4jfiA2ztVZ3Ot3FLHooVKF5Vk8kIVsp";
+ sout << "ZnbVJA52R4CLnmUyLsqqwDaUJYVfiOQ3zuqR8lDzhNDKYuTzkS2g+X9b91wF58wA+Mg/PyhUpWFs";
+ sout << "iYsELBbYlTW6eZYdDsIlI2x7khrgJuralZPQpwt2+3cg4NThKyKTpCIDQee6stuiUXqh1NKE4F/k";
+ sout << "U2M6WifViFrXuKqIChGe9DSraR3UfTyjDm4l3uGX6oE2MWXRyigPCfkHCqunq34357V0Bds/Q341";
+ sout << "hA6uDgedbPjXqPj5MF7/bZUHvqnnwD22WkGXfzgCpGAwN+WOWFxUZL2adBjJoEC7hYwx+umcXQZa";
+ sout << "H+p04+MqMvyzyfnI8d20mZQaZef/nY6vdNy5rXna6ZyFJnKR757m/OUaL9wrvO5Wlk0nzhQYnexX";
+ sout << "KyOvQYBbd4Hu5tWQ518f2RtMZzm7ReM7Xr78leb7GVB0claKWD+7ptwWwFVn7eiphI0rnxmSY7Fm";
+ sout << "BABTROBL//vZp3c2VqmpdZbhgeSyWEvyZ8KUheldonXlpbm0OahvTVNZHafDX2aUph5zYHSzT8YE";
+ sout << "cvm85ivZA1XdQfLkQot0Y3H6rQKiW89VE3qNoU2IReWEYZYbTk2FHIgIEicG9LECWuoPaHMLf+tr";
+ sout << "8dwW4q37Bz4IpwhsPrlBmXDdlPJnWDvq/C5AWpDkgr5JEg2E1CL2QA0YsUqLMt3ogOyqhr8jvAFI";
+ sout << "mWgYRweFTHjCMSbsRHRKr/WRRljRHqIs4sV1X7wzsXXR9V0GwYnkwHWu7SekTSivJHoedUspgymz";
+ sout << "Xk/OkmO3hJlvow4lyNXI/QsF4ObXSF5xvUuUtNHy4EF54IRqc1chr8E073MVyHdYbK/LYDoHJ1Kn";
+ sout << "wx+A4+BWLmeVS5weTfUHronUZlRIv1YgrSzw8M3Uxf3tA9+Vi9Pzu29X2IeNt7DbiZzGzMZWkgeD";
+ sout << "ye0enuQfIDcZ/AV+5B2mG6uxjLnMFp3S5HxSQEfbCwyvpxHEwImq2NR+9/J4UcxXNsqe2k92Wz8H";
+ sout << "jPlROh4/lFbmzxE2kT49ro3ewacP+bvpRpWyXpuVQo/oMQ0NuljpBtIoig4csJHhp/cREvXVsp2O";
+ sout << "q3nF/kxgs6BrfptKS6RE5R+gz1s9WNJ0yJk2QkurEWQReloMW/w0ktcfENcFevr/HkOtTr28gP1i";
+ sout << "vMleP2QxZrdNad2iBLdpJRs9SH663kVa67RTfBczDeXTqQiFtCpI16Dr7GZWnBoYtT8d2AXtCsNQ";
+ sout << "wUfRuiVc23aXnOkgD5PtRmcp7WvBDczaGtBBmTbKSy4DhlWol+B8kUcajMRGnrKDC6j1a5mYtWkK";
+ sout << "XzGrqT7Bsg0Tffj61OjYU6nCDLPm2Mm2XLHmhf8Ud3WJWANjsiWQKFTNH7eZUhzL3XhK/Iet+dPk";
+ sout << "Y1h1o4tGWeZPgFm9lLIhXuT/SkLR+XAGNNgrbAMGSlyTP6d2EPn5WvAL8Txt+1EaMITteGO0uKwM";
+ sout << "X+vpdPJC8cocJroQOethNqV+vK7brbtz7NZsdgFov5VYoLNWpwQLigIcaDxORQBC5yMh1ygeDQjD";
+ sout << "exiUJVF6Y0blMVyLqBz5OwIO1ftDAxeirjE5CIwSAs/E/CslH8dTJAL6hmTJ5Ncw70pf0NvzXkJz";
+ sout << "GUFSaWzRNyrpqFh7vtXVSqy/YAPOmvbU6TxUR8I/1O5afmTKkETJQP90BiuzjYs1N84IYoVt17YD";
+ sout << "xPWkQWH4LxwDdA51WSGCIwE/svX3BMYH/0TB8EMdL3FwVpNMgjCVW4srZP0KShXr6oJ18/RVSe8e";
+ sout << "Ls5HmsOL5vbvGarpKLvt0IXmveJx1Lr7h+9FAOG/n0HHD9S3Ug1JrgweszosdeIho7KDV0tGCEZR";
+ sout << "lLtI0p0KkoNEc6Hcm7VZyMOx6d4aN12NuVL3l6DFtd4SCFwBrIgqyDIf6ZAb4xeH2RckVs/uTBQo";
+ sout << "tNCZQKMaxcgc3Eee/IOjDVzFhjW4lo33FHhTrzYzvCISWREM4eDr4/ce+UZ6E00IsClZI2h2HkiO";
+ sout << "3k6ydBh51dHWHf5CZPp6gU218nIU7usc+XttaZpsodclSmh+LpXGroacfKyUGTB+I7lf+yzzBrmk";
+ sout << "Eo+WFBgemz3Rbcyem+zZaRS8YXKi4NlUX5lS3heqA2ogFwFDbQJB42FBCEMi3WVr33GELllqWSqw";
+ sout << "FZPBsn6RrIcIwI0/ehYLjO01/iOmI2Z+YAg/DRuxYDI/eV49z/xv0fCU2Xs3vYv8q7ZH3fjwyusW";
+ sout << "R7lKhO5nksDaJ1OS3bf0u237OCQYoc7dB/BkC9BHX2eo2Ou4x3pKBn5HQnldPNqA8Om/tdCgQeVx";
+ sout << "izQq9KJefumUdCmn4ChiVdPs7SK1DfhBnyL2xVHck9bC4fb3MuWZYL2XgwMwQ4OOOtUY+/sX8z/U";
+ sout << "O1EM/l936v45oXW1LJJYQtIhBf/rlqxqc/3l2YtHR7Sa40RknaaukggYYQJB2PJqXNBs3kI3FwIO";
+ sout << "z1mMafTy12d1ROBlqBee7AF5QJ25fCM90GgxqRwuF8yrGyXwv9pbnqmLBC6tz5DjQcYWpwm6qric";
+ sout << "pQu/xFI6RKtMM+tq+ETci06pbHQIcQaTvvW6iX5tEJ1iMFJaCueVEx7Q0MOgNhux+338+BEe7Gwq";
+ sout << "5g8xr553nO6KTDIrcTAgkRnQIL29WN3AOoFl5JWtP4uo1/cKqKpN3JcAIW9yQTjKtVOfSvZgSRxg";
+ sout << "Eir9UhU9SgMrM0/cdNLZed3+P8U6AYn5LxZDNWIlFzmxCBRh0SuJJ+yXtPkqApgvzso7d60FhUPa";
+ sout << "H0LYx+zhjx7jPhABHXhu3lY5uzsMcRnndIEOx5+N27vFUwxJC+KmgOi31g6XcPIBqwaWV5YxdVf3";
+ sout << "7cYOF9FYgRaCwiE0OKGADJJA6mbTYv7s4tzcPtHEyg3XK4UuT3lJjaxjsl/VLXbWW3JKKXwOuYGQ";
+ sout << "w7ExwYxyfm0tbXgwlK5nctw7Xg9U5CJA09jh2RXOQku+PmwbtseSP1HbWFC0pOskZatQneNQ6jd7";
+ sout << "kcYZ49MxSI5q5+rZuEOke5NytoWvPIeCFsKMUC9HqsRPivSrczNf51yX0fIuSTXts/ch5fakqj8f";
+ sout << "cA3Ln8e3wASkQo6qtG3PP2tAyEOP3qj39DAZT9/a7e5m49vjCx+ui8Kq7tj5ehGuKhrteERJ02q2";
+ sout << "ObOq3gYffJ4LDddCB1HQlrK1vV0ZSDctR4QZbEhD5PC0ToKsarT+FKHCDN2GngDIh7zSmEkq1YHk";
+ sout << "VXzD3V406Uij54UzOXv+6riqiv139xC8jo5TntvqR7S7/CNQu7D2ix+l+WMlUbFWy3PT6pL+Al2g";
+ sout << "ydAx24ZbJ3PQLivg44d64nA87Ii+eDsrJNIFt7jitB7w8tDfpJRsCyGWDOkA81Y6Us8udhe5KDUR";
+ sout << "7jekzGrKuX5VfUGAQi5+Vmk9W4d6C8U9Ma/D4wHIWkU+M0ffMVEgJjETYNMDyefi1gcCwh+OMCgC";
+ sout << "wbPlRBhdqaUMFlMBv7ysGeYJUQ6VoDw/TFcNtDMZV7G5mMwtsMhfJQV32hxLDhscF8nsafoeWIhU";
+ sout << "9aoihwKib0H9EvT2gg3KyvAIwNM/EjpninJXaaMNGbEFJ/7gEAZ2W/jAJ/eGmkLjYvv7Hh2oAkYn";
+ sout << "5UOk7TabvEFSsoCdeTjR4YjQS18zZcz3Ta7ffCgp2htZaAyJwVeHbBBL6KoJAlsgKi1yVtP8HXY/";
+ sout << "lpulZcwuaKXjQJVcHS/RcgZsaam9WxNbYh+X/CBouHSoZ2/LbtimWXb0WbQE3xk2Ntq3j7FJArKl";
+ sout << "gLGaykVfPzTfXFPKF6BduSPmxLCmgLgeZU2rIYBC0UXMVtEtizbU/4MF/Qo+0jjbO2BZGb/6tyea";
+ sout << "PRE/tXWyjGHkcx2T0ZUYTTpPtWI4MQN2LxopbcmDkZszXvxH9MlWC1jkhv2xBnddamb3jc1d4qmP";
+ sout << "YetgmnmQzoozyXiu95T4s6V+H6gNZVhle4+QmwxOe+zNzZaLhn9GBiwfLDGfBqc2usa2FvFWpqke";
+ sout << "PGE7LZ5HN+A4XFJ7NrEva9PtoPUOCM3FU9gyGdzKUyyW2ilrPX6rAil7+ZF6KDAVYOmnQQCtp5Ls";
+ sout << "P8t9MlqGMM5uwELErplaidbu2fNVTXvySFbZkga6AYWWV+xQfekzRYoN3LSs+Y0OuUU86RBqyOe3";
+ sout << "+5F9fvPztVD8Zc/c1jfY7/3lgJMtgEO/+TDa+gDQAZoalgf/SGpvv66MuiABp8LALajU0a2k1DYT";
+ sout << "otIdBHlVLj/JKD2tfHDi0FevTo3l2N2do9NEx2tyMGoLQwuJplvP40vHr3aII9Acuim4GEqh8cXT";
+ sout << "XWT5KzFF5L8f+QK27m4lgotZVF1ujXVuQRT+CHFwv5LTiwHaZ/KFgVVY3gIkjCRoP0XRARMk5oIh";
+ sout << "F7G9FnoQM7oV8f2b4mTKVvqMMv3uWzd4zr0Tx7GTSgL5YfcJox9my7ibHgpBpBNtej8uX6MtzHXT";
+ sout << "SbBoG3lDrdZ4fhMx8d9+/oACjVThf3PPa0exxvme5s6T3GaTnZt2BgzLrSknMaRZ1Rx2/kT/1ecn";
+ sout << "GALfb5irpfKmQPLdGJv8EIAB/Br50DqHGC7DfCtZ1BshNTwgY77mI9cus0SGrgmPRXYuuEUVhFHf";
+ sout << "OfrUwDNX/L8+ztdvlgJ4NnE5tUnegEz0ApOyZWywtxDodl3AZoZfBV5ODhoHNsCyZCqD/HYZXHb+";
+ sout << "u3ZJ2PB7wcBbxSCf3mLSo8Kknq1d87o9YAQdyyNR0s2XvQO9IakDz3HfBFugDO0LQn0ewgxAFhMs";
+ sout << "O6hA4M0Q9NS3X+Dr/Roy7wn51BkJhrLvh9TSO0+VCa1EBxo3mI5vM0m+Aji9HwySVBcd9L5FcNIm";
+ sout << "PTCkaKyAd3DsqBIoHFhdLhauODisNzUFC32TZiz6VxSwxFnNvyrRQUDidLEwWWcp8Kmm//Q4nq1f";
+ sout << "F2Qq30IqkLC9nObGbVSjZb+n+EwqMlE8mJxqbgkh5TQ94V+zQY1ykiRa5a98cFA5MLazMniE0JXv";
+ sout << "YqUtmkvvbyFjbAZ+/RPUqG3eQxyvWxBSQSC/zpbG8rJ58ZUzuPkmmXe2z3PIzFpb2BMVi7U9t8Kh";
+ sout << "+ywyagKP5gLgUTqOS6Sz6p30dvtDsayhngpRtVfmOoJ+Px1iw8f+3o+JEdJGnXEcecaGIzABCOST";
+ sout << "mEC/TQQ5ecV4yEwN/cN695Nu76+cSiHR+7UyWNSVV1A9WJhsaW6NJl/YY7dZDlMYWCtlB8kUCfWr";
+ sout << "4bhrudo8xW5XeM1EtmmpONd8HeMN0479Y0KqMHYCnxYz+UnC7XZBJkweyFPa6TroZoqjaIZMEyRm";
+ sout << "YizuyvyaX1eYGM3Q58/qti2hIgrRovGl8VYnJWJbaU41OB2CY+R/TsuQo4EcqfvAUCgNxZwwmvkG";
+ sout << "brIAJ5+fOVfI4Fa6swJ2PulbJBXXS+fCrjCEvBAqg2j4GxupCDk92IF6cUH0dgmx1TBFgRTNDfzf";
+ sout << "ENcymno2Q8HKz6uvMenEfNDAsmV+giNq1jIY6gm09kMwtDk2hs1c0hqFuno7pOrkS1RFbd8onyGC";
+ sout << "qoN7GtGpMk6q82mLqrEc4chaIOF5UpPSH3sRa8tbiP44q8ItnbXnMlzox3ZhZqJN6QgrNWnQa3wD";
+ sout << "4qp2gUxGkFE4i242i/9xzvfYu8h461f7e3Es8rW0sB5PZ+FQ8SImlpE5vh+Q1qhXoNof+I0fGGb7";
+ sout << "fbTZmz4ZZrhCS6KgP6HBmbohfPXCKubfc8Q+F9PCaAN31Fp9ycTV8FAfXySbQzdShgnnhnddJl/T";
+ sout << "Vhx2PFPythxz7GEv62hwjLuZdxZq3nCykVHZ/yTE5yoLBaYUvsVVZn9rv1YD5+oSYeJZTbjR6Hif";
+ sout << "t7geARjzf6oe85YZdQvODK9SMHYo/QwdcPwwnSf4Dtmvvg5XnSG41s16X0n7o7g5ZgFr+HTLwMzR";
+ sout << "k1Zk3offnOMpkcpEXvM8OhFvFiXoN91CoCvin7f2gX7IWrNv0mSj92fbKzQ9Qa7N5cH1pldFuDcA";
+ sout << "kQ9wHFmz+rrnzgEl03Br7NqKsB6koxE5YEzHkNLHmg+pTOyu+yDREbopMHBi+jTUReVS+5fKFMLY";
+ sout << "LEB6T96s1i+ygPVSt/4H/DQsU+0caW2dCfrm0onr04auJcU+oBYjbR3OlM+6SD9UaUjt2PtNEQ8j";
+ sout << "fZocgKhSxt855hYm0y5qvfBLoDaADhziF6Exuh7YM/G6ywKMi04Ab7qecuut0c2bWHXYrQPhmmCE";
+ sout << "NXCiCX64ZIGfsFR1pf8eCChyiA/GW+rQvxv23bl1RxbDFk2ZO9o5581NjEPjQKJ8AyE57W0bZjol";
+ sout << "8s4S1ZNwMF46UgZEJwCPLuqJEJvZyLBwahHwC1B7dgoDdZ4hjaJdH+jYEZusfsvV8ZC7hS5e8V6U";
+ sout << "rkOARJZwmYES8JJlVtfbfpCqBXZ0gTPuBKt2yYIlGTqGwGu+biiNMVt3spu2Ov6MlVoxY37UZax8";
+ sout << "B0Bl1cbrqIBC3LN3uWsmnR99chFqEmhi1cRQR7+3DtD9gpkt1Uo0STq0YCgbVmr4azLoJ3kcEdRa";
+ sout << "cfZ4LQ+7zlFTAqOhh/kBPsL51ZLQ83ipFUgz+c1f71aM0eCFharOyhIPy6YAAPHpm0FdMv9Q53V2";
+ sout << "p7flk/h/s2eqe6M2jbtud3bxxUdercWJvaioQZWMkNSXcP4jKaRN88np7Xc44pjUnPgO+AYc3Kl0";
+ sout << "2DXf19IXDjAegihkFzqvAKV4zttcsT5gi8wN3M+zdoG1TcnG5gJfXumw+Bu5giUudQ9P0mUZPc4t";
+ sout << "vMWo3r7ajnqeiUaN3PZaItRimu7vzun5+qOEIT5nxHNDuwKVN8ZTigvOrFRUxYkyzoUzUsOPUdla";
+ sout << "nE3MUbT1bj2cs6ih5jqtbqgutSNQSGupC8Xve/W8tiQcrfRPBMdtvJIp6Iz+Z7g/SXpunCoFaBSa";
+ sout << "mAuCHeTbUrCQ5xbQ5k8G5qIYRglHRADsWO/oM+vWtc1jEEBZ9UwLdFZxIdj1ytNnTwIZWstliRQw";
+ sout << "hwNPyxfJQwng2WNJFG9LPvzxxnpCwYG4Fk75wPd05j14gLMcERPyWpwYurfqeQG673BTCuBeF/DV";
+ sout << "zZHI0nIV9CLhSSbuo46Vkb1+FaZwwpsD5sBg4+EwKcglHUzrqiskq83hIryVcvSKRNdb2R8VDLCa";
+ sout << "pPblCQx811bg68OJ9UHqbw8FPcSu8M5zbahisVzxszdjmyZ7fqWR1twCBsJu+kLjNEsgC1cmC1ca";
+ sout << "OLyaDYs0sad92UjkYqqBQijASoQgafhSPuJ7sEoXvLXrPP9GWb1DV6S6mD1ZBRicwlo7pWb6LREQ";
+ sout << "HFXvVkX4/seCn3O76cNuqenVr3EtIOhKC+lKDCBbnKZ2ggyoIqgO5BIteeFcs5rz4yuQRM+lB7gT";
+ sout << "Nvdwa7iYLDOsYOrtBpNMD5KO95LQw6m/Okd9RKQ9yKTcV3jQFGfAlYcvXt3g7tfz7Zyj8ED/thPO";
+ sout << "pcrsgQE1E99T07ozCCDNRaPc+DhgHyyIEHNNxmgGUWziRBKqhsnei3nPCg123K00ifq005MgO5wF";
+ sout << "oHUXDMjbWBF+ELX8MWGaMh+a5OWq50xs6wPw1WhPug4vnEJa1j1rJxeGRSa3SsELXmeY2QoAX/9b";
+ sout << "S8HeDJq2O9OdVy8bMKdfxxvxL+Q/5m35EGdExoccnU+LcpGc1fIKqLfJn5oKPS6BlNKZnMpkyV+8";
+ sout << "z6VQA7zKvYP/Coj8NSmZkBJm4SoUkTBt0hZ9KEXEAAzR7Xf7BzR710lXcFdRCYWxz4KEjsu8z1e6";
+ sout << "NcjhBpcgvFTnowMThMLtaWotUJ4KnGU2ys5wMNh9E94+pZNkEQ8xShAEt9le6/gSLLuXpssWJIZE";
+ sout << "JR6PlTgQtFoSwYfx4+/iqbMVtbH7f6tBpLbnrIzQXRUY07Uw13p7kblM28k91GxRDqbu0FR3cWnI";
+ sout << "nTZBplbiYcOM/SlFJFfThLbgAtTO0RWgJkSr0n0n4duBka7ZzfUiMXjyAHvBAQTGkHIb+YidnC0w";
+ sout << "eY86YsFjo3jTqJpLYuciQLJ/ZUI1g9v681JJVICEw4CxgguvGJJNOgQDh2CEZI3AGLlbm81ftzRs";
+ sout << "MpKPE/RoI5kzoo43tcr3WCUImlVdrNak6K4gaNXH4J8aui6MN3kmZ0LWS0l/7Of/bVs56dEOozSm";
+ sout << "PcGfVLmvLExSbXU90kmknOftcUcTG6J9r1S6PR6qaF2w/G1t3wx4mdberrM9hOIMoXQhXEFShNHn";
+ sout << "uZ/DLE6t4Dl1pvD707gtoY6UpHpv9oV5xVVPVnq7smExwf3gQsuiHnDDGrPlz+DmLnkZ1XYCaKSf";
+ sout << "EQleJCCuZApUfEl9F0EjFjSDBgPoPpwQ4mhDXFjhYjYhdWmOfANCDsfXm2PUoBm4JJNI5tV6fa1+";
+ sout << "S+yaFaIw3TiqWvaOGIA0GyicfP4SaHodizLUVo+yfYJhycZrWqclh3OkuT2SVndIDLs+8Xc2oSBY";
+ sout << "uJr8P3Rm+Du02X7X3D94XvNI+tl22SU9CMMfG5E2kReYpkvtVsejWaeg7QKil8gyRLtkH1kIb4Bl";
+ sout << "Nu1oeg25ufTr+pyX5nqCNqeStAsIMH6ynVRecIVr0R+AF0+winGiFVVoeedIQzWc2pe1nD1F8LwD";
+ sout << "H1NTNdcm8ukf6AA3O4pl1uvLPIZIwC3Qk+AW3u/Xwhjq6AxEpkCGoLrUztB4Xj+uzoBpQ2ka3Lu8";
+ sout << "K5XooiNcanVGjWC+0/IBmB9gioabhNXit99vFEdTQCsZzlDY8D9le1IZ5lXIGgSZR5LMPOAzSaTA";
+ sout << "b7vXFps/tixvj3n9wxtOTAMREIVaGkJgunwSNST9lQ3tcveAJWnowLztcn861ystgFGuDgW65xgF";
+ sout << "Fo1EHK/Sq38jEw9frNatPCJK3eIJRih53VdvB9A+viAX9IoSK+KH70Jo25Bcrv1c+6Gr2H/Rhv4o";
+ sout << "quxm7d9XpPAA5dGZb20fFloxerWOvWO+QHghXZjyD81o/1hhR6ZkOiK0trPkvDLcLOfPl++YIjPP";
+ sout << "vI1hZ7uTmn6qlN20FCN6P5H2YVJBnhSZrKx8gFkULKgXdT5SpVSR194vK5cwoOUY0V5b4EdSAkWa";
+ sout << "3cQm6ibiokDAktBSDS8vpitwP0sLI5K/7QfLuXjihWXONCSf+RobFWqWPXzYW51q43+MP7xb0gbM";
+ sout << "39I93rn8s5XNNwVdO/IXzd/HmMHiElOr6xEHMzCUErUuQQQ59NbJ/N/iVuv2c4JZ7ppFWcz1ZQRz";
+ sout << "dgtOzq49kBMIxksFoFuEWuRV8LEqBbNBQkW2zFNNWpV59mqwpikzNsraFEvUPBUJKz5JNSuXVLF5";
+ sout << "vq+hYqVmhd3UC5RU50BWbQgMgRm1Bw98ScOPxfMMqBFPiMntTDcd2UuFptT066sbXg8slg/itSSO";
+ sout << "/CQmixHP0Km3Lz7K5OqVdIr8GYAeP1M3pjxZCEsaLGF8YGrPnIjMyjyFxFhyteYsgU6330hc0TGN";
+ sout << "LKJw1F8lbEyGt5lujwnumUFn4Wdg+gtd91ewchV+uCkT8Atrrd5gvCc2AEWxRceQHrUTuW+u6UCf";
+ sout << "LZvF/BC/8PJszVQsoy6dHruw4E7UwlX+TCUvP3G/wMeDf9SBsgnn/KsRr0aQmzuIJpWNNULXGyXZ";
+ sout << "0OHSxs3fJYq/X1FB2EgjJMXrAczU6aKHAj1AbCUbBIaJcB2umY4YAQqt4x2q/iOJDbJ+k3K28+Ry";
+ sout << "vW5w7JdiinwwjNpQkF5OO+4bQ+rqPzJare1PT1eBn5OxRpVlZYpn6AZIt6/La8Ir2iWtAdUu9Krm";
+ sout << "9F4qrhoykAlcXnI2GZi+U3I5ftBgWfDVodX8hxzyS5Soo2LFCwmG4aYayxu+RigV4YRyuXdHe3vD";
+ sout << "L6Vs2qmyKQ4JE+IbtJQq9jkC2PrFMwfP3nOstccqgnlGMrKMmN/UX2R0emMXsImuMGTn+rKctWJn";
+ sout << "sXIysVIEupHUUD4qC1RuZGvdGk/LsV7mQO4SkPJWD1JjUADB+8Cw2nkMVCxn8gFTf1JU+ovxgHl6";
+ sout << "nMrlYnN46eRJSFIFvZP0S0gZf18aJaxkgZt/YPxm7m8ZT6NLqLE54+st+e+sWHlP+9VeqA5u4aLh";
+ sout << "XEJGaJozsDZxkNfKN9lGMkacM7LZzR3lhjX1o7cjZ9D5l4m40M7DS8sbMg0/5wXn3WxlPimZFLvC";
+ sout << "IBLsEAoUM+oWB1xRq58SAPIAgic3jETi8vYhDR3vlN8ubWsXyTqOJKIXJ8O9Tz+aKkgLq1P/1y21";
+ sout << "Py1VM0VW3XRnnzs1mN1NLO0A9Ot7NMluCUEJekVYi3jrhhLRG/tGYkLUiYcUq3ozoUZjBI3nX+g3";
+ sout << "4d9uDIXJT7GH+ovsh1q5Rw7H9bijVF/qT2TNJH0DoI25kuplXnihc1+PAMp9XseXwA3lPZhxMVLU";
+ sout << "ftPlCMbdP/LUPrfoq2MaAQ9oKokaK99YSEumcKzgcNyx6wUrB7ILTO3zqFDPsxTO6pxaPgOXC+yM";
+ sout << "mLm+LXoEFMsByb+F8xXsrtCFDwNBR8XU92UgjMQLbvymvFE/fEeYMwOd5VOPcPeOwPs8Jwn6K1TK";
+ sout << "Gl/QNu8NQOgVK9rIISGbRdpmcLRW7JY4sp84q8CHBaZ+uVRBYzdFDw8VokNyWestrK06wQG7UOMa";
+ sout << "AZGqcttVsN0aePOk13IfTG1llEL2fczoNioOpCaVqzzUSU4wA0QBOa33h1VHOY2Py3x7PWI2QIJt";
+ sout << "v1IJSEmnpKgVzYjSnpt7kUlB2dYv7w3qwDeiE47NuK4ywMZAUuONlL/ZMpR3+I/EIu3VJRo0RknW";
+ sout << "jKkYUjOdo0FrGhQWjuISkZB8prJxAFrTDxI4aiHSnfMB0q+L2LJU8LMLLI3Ok2NIxWmdIJaFKckH";
+ sout << "f3JUFpn784RDpdHlIUU5EOzfqal9flGrMlz03gMrvUiq+9ZtJR44EXoUwLLdAWN+FO8a9Rrj3PUw";
+ sout << "E9q2KZ9M6BqLc/YdZFHlpZYOiIlE5x34OqkVqcQOmlWmPVP4bSwbXwmk6oWjruRxmaA2YQ/sLIHm";
+ sout << "XBZZQdMe+i8TqEjte84I5EebJRi0oiAzTFUlf89NWonCzSnzdFQPOiB6Ik6dX8o4607SkyijR81F";
+ sout << "5ZiusAkWNx6zksMfeZzCxnS93TqiOSmFVrbxMx6BkArmc1IWPCFu0OAQYC2af0KciOEndo00R0xf";
+ sout << "wQAtJyKyfM68kaHebkMhRWDwk9HEwznfn52y6evH3n7NnDttgw8PnphD4/l19wWJdZ2l4tcmY4eX";
+ sout << "ly2zjWCcaIsx4QMz1gEGc7lvMq0J6RbzsoH4oa6Zdu6kaCN0Fy246vkDDBaW4n/fZt+RvT9YbiqO";
+ sout << "fzMs1dcXd9mCOz2yB0/i+XzzG7awbdiR7bafVk+7/8H4y3qVyfGrhilzYSP7qEjF0ossijkvU1bo";
+ sout << "OblpwSw4RLL1npCLQDtXh5ugQGcsEesLEBq2uFpxPTSLKWgqTr3P/XL+EwNJgkWDIXdMgG0Ft52X";
+ sout << "bNQpevk0mqynVmh3HiVnAZ/mUaRXZG40kqcfQzxtH1gvf41S71sDXT+EOQDdwxygNV9BhOfyA4eO";
+ sout << "gNeFlhuCPE3cAzKDUrE3ITYl1++4JIpP3BAcsT03GlBlSL4N1nXi8YuTCliEmN3TRF6FLJ5DkSN6";
+ sout << "tCk/xGsBkaZlQXjj2P4FV/BdS//Uqh6EBRnRMoRH7LgLl0QJxJrj9RxDdRwW91fNOAJjg53IIBX/";
+ sout << "XCsZr09+jke+Ci0r1NWHRhmF6qNY27CVKs3Wub6dYS/GifyrN++qpK4aTOLkkXQv5v6Zhs48qFAZ";
+ sout << "4XORp6I1s7vCQ/yU6R4UeBNoRAT4naUFGc3gvmszzFb0Cp+li0RM9wgW25miE5Y1PMpne0nBvV8a";
+ sout << "k1RYRJA1ZyOEQbmFxLMqanwITgwxZT7tK+nSITSmHAo7/m9IaopgYOumkPGdy/KsrjkR2gTq0XxL";
+ sout << "OSjhTS/sOrZhSV0HJ4pBgjZMIAlEwv6k9XsefXyREo+rudBS9CiXW6TzIViAWCMc/hQo5zo5tUeA";
+ sout << "vmTrWxkRgUmdsj6FGS21aQFPADj3oPrAXsTiH1ZcwpyzJrT42+4P/SLFw/h0NBYRozI01AVVaxQ8";
+ sout << "sbJ8VUFliiZnB++Mh46aAF1PzomK1NiHC7n+llPAdEqpqiWqVH5YK7CpKdmCPnTUuOb4a7A2Ylow";
+ sout << "rodSKCOX4VHwxzARShJxnQz+FIfVIzpiCFs9ZJKM6x89ybXTKVRrmwTbTAXRbihUKa9d6t1vmoDP";
+ sout << "07z8WyFDoeWoSmK4XZfegZeZMIXF3hrDPCYcZEluxujpPG3ZPPw+/pGjBOx3/QFJTQdeHBz/PQ/G";
+ sout << "wM7DkD3NM3fVApST0LbPQFKxlEXx9tCL3GBul7YnnUmF+N3OIs8vDoJQqNqVi5ZQnGt2uFLOYRo/";
+ sout << "67XsBQv/ZwYNC88VlJykWEbbVDN3rY6i1JLr/9WRygS6aqF2JJBTgLwXpjfvaq7Ygttm3WiT/dxj";
+ sout << "QtLg9L/y3Q+LoLOKZ5syIdk1+oQhYnSqUgTNEuw4dcTXj3v2lHm5oxzst2dtierbluemr//Bnclq";
+ sout << "8FdWTT2JC9N/YI+CZi6K1lz1Bq2yblxL+BC9UE3rtsq81Rm8kJxSIVQ9HsS2ligaxnEfCIPNvMhV";
+ sout << "orLVKxoWuOfhJt++IepP8kap2JZtLdmdZSr9/6suSkInlANS9SkwaNWB+EiTUm5cfu7s90V+m4J2";
+ sout << "SEiAMESJKLywmeQi/JcV+8lEiThyYTxhm5huK2dUGPWnzKK0HsdOuqRs0RnHn8SQIgLndjyTzcV9";
+ sout << "2mpVtjcwCRKVBFiPdyeqyVI1j7srk8WrRc0GozuUkSBMVk5KHQa3/qZqiV1w9KN7eMRaYiT46bP4";
+ sout << "2dpdvqNrms7/qmyv0q+Bxp7JYv8bYqlQPaXgHxS78I7HqFsLvP4kZVWxmhCXIoIObX304w3KRWH3";
+ sout << "WlQEj/llet0DPOhF6EPfcuMeSvH1UhucSS2pnYwo/P0BvMYvj6gcbjTowWMk3vmjiZV6a1xq0Nm3";
+ sout << "sgrrS/RqYJeGPM6THw80LulT4e0aflcyFN3QQ+C01vGHjpaw0OwIW/iWAMQolVwcZznJPq+6Aya/";
+ sout << "aYbiphirZC6fZtUMIXiKRrUxQDyfgI0y8XzJgf28AxgdyCP9Dashm9uelYMGU0Jglf/iwa2K1pkL";
+ sout << "FTS7zfGJmz66x2+7agbLuCHd7AQhZb/rfOwauykZXpCE9ooLMLgW+BlKpqgVynLrt+b0I3KqQZy9";
+ sout << "KkFxFwC9cumQJPtKGKUigDLV6+orLNwgtVmNal9RFHppuj95RWG3XgwNLLGoE+K5TiLhLY1na4AF";
+ sout << "1NXsHL3pFjM2VHEU6cOqVLqhEWwGhrmBoxIGrbmT//lUiGwSLcmDHCvv3Qig43HbRmuQWW90qFzc";
+ sout << "Sp7atxofyP8SqmW2aRw2SRH/nvCTTpKw4s9bhDEWCukZKo+bpfp1ti0P4DzSiUxTy9uSjWs3+7Cf";
+ sout << "nWsTsoUxU+YdC6rDJYq2TAcn3Zl6YBU9XZl3YLckRqpUGUQ0IkF0Fc0bb5kQCJH8h9qc0Olnqa6s";
+ sout << "sjHyjTwkzZWFmuvJPfxEZzsZZFu9qWdk/Pzx/46e3J7/VaQhtyYidoCdV+wtN9IOuxnbyIV0A+vx";
+ sout << "ZAjS2f7y5aPxzc2XAmlsmNLBqrRfMm9dvxN+gYVD3UcWgzTVxfIXRyZqmdAvuHoTq7BV8FYPMIqb";
+ sout << "SKpCcdEztosRjzsCxioB0JPCGxMhHgPU4FCNBy24fa5RIz3rZkR7xFdTNw4LTkJ15BRKMsBxHaiS";
+ sout << "E30FSLCvFvBVlUITI7aOrvc8+2IgX5cXLxHQeqaSTyKm76ioORshYyRkp8K1Ao0hZXCW8rks9fxm";
+ sout << "FeRen1ureYGmkoKJUWq92iAPwNsFvvxTm4IprFoUDn4s56Ung85NTvcEJEyc4yVyevqb7hh4axAu";
+ sout << "h9N2uFPhx9YPLbOHA9m84PWgt/IPOfdwhlylIL50MZ9qitPPG02an2gjybEvicqckLYZt4cY1Utu";
+ sout << "pslZJgCrFZ7ITUBixaxhkuXW1wxwkF8RGosHdfB5zycnXkvXhdV41+QDvhZ1tR2E10jwqjVR/7W+";
+ sout << "suB35kUFqrlvWs/bBTQHwwPdNmGiNFi+uq6hIBQCm/J0MbCaYcPPfhtO3LIsd2KeiTz5oV8AqBVu";
+ sout << "NPQav6McdWJjg6dBQw4G+raxkUf9Qf9TPQspS2Ll/Kmt8WwbqiRHutPOaxaBBeE8Igx6BT5kIASc";
+ sout << "OsAnqIzudE0cclrkxN4aeTglqmwwq/eeZUSOzkv6Ge0xtpYDgqfGbe53fxtYvItYYExMgy5HdojY";
+ sout << "1zDaNXsfHTebUUjPnsXitzIQQpRobTCe5ttWhfeeXOvE+MPLRt19KxSrFKHaRlQtjYsmxdg78zc3";
+ sout << "8ceNlnoQbh/puSKv+QWnnELfDnmgVZnU7lIKs2xM2B+pNy4Q2YX1wmIqlC53rsTzU/Ik7Db7Z59s";
+ sout << "i5QWZ0R1ll6rdkCsUCTkK8aqsF+mSvWNbMd269Nch0fSx1D1q2NN/4lHDDoOX1JjLPMxVNbdxLMt";
+ sout << "gP8gpiQrFJWZRmTgYBW+biZPtiKgOztsDrZ+eolNWpsGnLTK9Xe11lZrS2fPton4L4jnr+R5AnHk";
+ sout << "F6tBVhSBRHtZdF/xNcCsSOKvTOZJCtftMO6XaKMfsI/yexeLz41D5qfMVSBHY4bIgL3Jgh952tlH";
+ sout << "2blbe7lcZxZN2XkUXXaVaX60XDTLgZUbGzqJo0+x/TJIAuCAlGF1XeF52jTY7X1veE63l423NPsf";
+ sout << "kCmiRcepJT1ihXQDd/VU37tJfeGZrN8FjJzx53PVp70r7eLJMedMbBUJBYmOofO2F+OU4Rfyab3Q";
+ sout << "Wy1ngiJXrfjF1DVqmKNfY79SQWWjlzST97ldjTICsyQdHWn8j+Bof6sL5k/3d/cT/sohhSSl0Bux";
+ sout << "GssAFa27VYsJYMjnriFZD13ZYmjuesSXyQ+EkUsHn56MB2xEwLRVEhq8ygDP2eO82PnsIXh+uVFe";
+ sout << "0ZQ+eGSiGZtqeL1aoePOA51s2eE4DxGBHWdSNr/7Nc43oUU0P7sV2lut9VmhgDleV+Du8IaZUMIb";
+ sout << "2MS/TkvMW5S/ebAoXaL68zmsU32tk3C0FG5oxbdZD1tcZ4yHNJ38HPSx39uOfRG+FrmDqomfs3k9";
+ sout << "pu6KaH/Be6Urzd7mr4EvxaXHz58hsJGVH31tOdo1jhg7i+6lCJAuJX01FWfnClLz0mSJnIrcPge4";
+ sout << "zQrhghg31+fLZw4FtNmiJJcU/hJPGhUZ5LDJWFHUi04u3+dfP1pYR4e+CEOyvlK2YXx8CHcvjWJ2";
+ sout << "kCW8TMN9I5arBUTXdFfOg/v/EH4cw54bmTgzjpn10HlKqfJn/mJPPbBGCCS+pdGmNIzItVME9IES";
+ sout << "Gco5a+DT/a/TThfkRVKhJShe7ccCC6uQPdU10P+DV2KPxOfKvar2ww3I+VZ3FKxhC0AcMZBbN0V/";
+ sout << "0tTOJVH4GxjH84c9Jr6niQJ/2w4G6tuTcJ2RLb+HnzfingM7J9VOr9C3IfG0jndHCww96Exjjk0j";
+ sout << "bE+Aq4H39i5pH3jEvdMspm1B3VY7EN2s+fC5jpmZpA5kRb6Vj+CYCbyEYNicyAJqCHdJLrtHDiGN";
+ sout << "fIhpfrkpc4+g5xHHel+nnJKgduqzs5eecM68vOtBTdmLa/nxUz/VBywqWNmmN0v1/g0Jlw3U+IGb";
+ sout << "bZOvv2Kg0FO5rJmnRIUsokPkcVxnfz9iyf+l1d2QIBmeR+YeNtLS3XUfIjHvHwSrZ3lJdfXNnnwr";
+ sout << "zZcKmHHNAoRLIhM/0ZYQXMbOvfEqUBZ0GCzMdwiEuNwVPtbDkgmcWjYR6ew4YjRZR9alIas8o+qF";
+ sout << "GkL+PT4LEHfASjQPnCZIhUISIpwzuf7Ii1I2Fk/v8jBbL47msvPsQcVH8JiFiWBGCBfSB8IfSSw8";
+ sout << "sgc4bo2hwHMIES5W+rXXoQMSkGFP6tUfd4GvS+8t60UyDUePQbBGj1lt3ETsinD0btygvMOqDhD6";
+ sout << "H+E5gRWBXOV6QrXtQSPG4ZSPlDvATmDdgLCBtQBBMTLMg36Kvia0W+Yyu0bFtFEJre9N/bmyyvRR";
+ sout << "P4tHn8uOTi3ry5PT2PQiJ9c9utByvU3ydcBhoIlcdQz0f1Rf8y0eyKj6qt1/OhnzVXuLrkqXLylC";
+ sout << "sthAWqyIavUltofLnsAP1uMHZ0pHeX6R1lwpIROxtRH6p/0+//OK4lIZx+7D8aMPIaefQAB6Cf3C";
+ sout << "jKnvyCpR3QEh9JLv22OghKwswHT50P/+z2XbyoQsRySXNTUbTsGJuYBazgG10fe6YHQOTrUDUp24";
+ sout << "6PS/wEco60z1OF49dVlYak9zGA6kTRndkjWDu49NFhJRY8sBd7TVxeiU34NVyjIDLrLlx7DcuZ6P";
+ sout << "1/XcA8czU9m2n6VDgkkAB6eaeL0XhB6XZfOrg9lY6R+xjVYo50Fg9A8AGen5T6+m0PeUr20e3WV1";
+ sout << "H4KANYIvY6+zN+Fe6K37VaO+CbDZfvFMzifvzTQWQ3kDDTWX/BLfFgLGZ6QBWrTwF7MNiQ6fCpG9";
+ sout << "BAYy9V+QWy4Iz9lqnp19J4Q+cqlIUDBp8b8vBNyrOjjLjAC7ezFUujvB/7RrKtnbaYmS8vYF1f9+";
+ sout << "mGqA78owo1zfad8DsvGEnr5J5mC8d10rXhVXXb/udkiU5iEhYPnSxRy62tgLbKHJOvW+R28r7lpc";
+ sout << "yDK1++NYKRpDIHYMuaZ13oDNdIoQD+d42Su0NsP0wECAoMmQKkWUaJyYUyFqzmQ2FJmvl6hDSbrs";
+ sout << "lDipexBL2U3slCAJbX/PyvE0KrBPBe+vT/w1Z5s2GBvSoGwmFlN/oa5I4TfcTA5W4ie1rBHUKqPr";
+ sout << "az/4un38eXTaF0Gfiw86pwlgJXWr+D9qvXQApo6KmaJhjKo+4/MaPw+iEQrU3IM44eaqq9exmiNF";
+ sout << "/SgBv76gC8hXFrucFFp5znYrl6ISQUedvk81jnI7ce0Up1jYsh8fpfp+0V54IUNKqxT9YlgfIkbG";
+ sout << "THuQQ3p2F3gLaflJWfZEo/lPtKc6RqVadizBP/oxl7q+zTriiyK1JWiJojr97xQcNpj5j1QF6uqK";
+ sout << "+7NQgfVevfP9FGVKOlIgvsqc9fnqWd7pzWKEt+kfjbpMGpr0XFERPld0+EqLts1kAj/ejnMymrXY";
+ sout << "LF9QJotJBy/QvCuoaxdptGBtrC+qJIuoYKTNTwW4A+kaf3RrNr4ohuLBcH8RRpHFU+3fEDW/kOx0";
+ sout << "fdRlEUJoxdfK4Za4moLfsmI8pfl5rBvlh9oumT0sgvPg1P3zp70UXJI0tSHisCb/uK+Hggdk5AJC";
+ sout << "gC3NM9uf3AubM+CW8rJXM6qz+xwOeTuMawJTGTrSg6bB0jREYOXfvwZ7oNgiSdqe4+s7tIhd21Hd";
+ sout << "A4cJw2aRWk0ft7jp9O/6A8II+hJoxReOnEFn54/kdM5LFasSjd/wwxsdtGevYixIItCTJ36K48dU";
+ sout << "nndygDo7qFWaryImEjgErq5NT9VUfg/VdIhCfAro6xE/1C5mMZ083LOjvTdxdRO2qtgKYCvL3HWO";
+ sout << "Q0k2/WqcaAKyh19cHsnoVKd1gXCummMSRahTGpmK82fAE2vP7MThuNel1CGti+nKVvzu7Yf3i9eJ";
+ sout << "rUapLrviTwh0cQzVAWHXxgE1PB5O1Hj1/hxwRVStuKNsPjZKFmNu+j02TT98hpYCFQ3F2hE/jsIT";
+ sout << "VN7hr7k16mXUqWbCzNWO/xo86NyCS/8HYFXMomE9KRLlP1cw1KrmBltivEnesHGCi9sAyHnE9bcX";
+ sout << "mWXL/tg8wP96Eh10KeJKzx80xfSPz929wa2Z+fAfjHqVgroE6AJQNKengtaLjTo9vBo2E4mFbs+O";
+ sout << "AUy5hDVB7Mm/koguhT3+BxefAXoUbaUkNlIewCbDUlLIT2o7ZLYhoR1WTBv5K0eQIt1wTFytuCVq";
+ sout << "Ldoni/6ad1IzqamnGBam4hFQlbQgpezUea29Idj4dhUqTmksk0fV6sXYaT7P9ELB+W/OnF6IY/d3";
+ sout << "6TUUvnSPfyfOHHxts3HnXQjDqrRhPeJown/p6tzdtytVCLeUtXu2o1SgfGJrs9uAtT5+3mt/NjFb";
+ sout << "Z+VX86HZQ0A5XgYWiBDvd/Lu+ptgu5kMX/QAzBZp4Ubxo/sF57ZuA3TJMx3CjUFgNBwHgdWsiMHf";
+ sout << "X+sBqB17McUR4Ar0OVoRewmYlRL/J9vN3ZJ/P24Ctp/7Y2Ozk6vJZBIKnTvcapys6mah6rzYurq1";
+ sout << "vQ2xWL0+hq2nsbkZ6bMl0ummFaCrPrMmBLGAQTa0Qa6QqO8sCtepBHRzJdCH8InI/jRBJKrGM2jt";
+ sout << "s/bM1qvX3U8jLY+vGwt8URt+H9VrF13rKxlgTJmu15oC0duyGyc0ejeDsopX4NkOQ6xScBsPAMV3";
+ sout << "ObV53ImrQQxdypGM8UWByxISLsLxMDDB49DhgBvOqfJCJ62M/m3zZNV7PzUkUcI3iB8QDpovj4Iz";
+ sout << "CmTXnRgtsARcRc977luV5QiiofiyUOVQFbBY7obGei9EnfRspuXIwDsShcRIEHEz49K5SdToZ3Ky";
+ sout << "XLZUEdbMsZDXWjmE1A+hN8G1oAh+fkFoXav5S1xvmYqr28vAff3UhxXH1ZKVGM0ePGoaE+AgPIuC";
+ sout << "NawQwcyWFoPqLjaWg9bgf1K+gbhxH4ot5ehVVMb5YiAbMrIp03ONvVFLGWzI3tzFMMTOemMgM7UI";
+ sout << "1T5rYrWbm0q0IBIhEpRCpR1Smtgf3wK4pUX5y+1+xGk3bIl+Sk2Niyl0DmrmqZlenbbts1n/hDyX";
+ sout << "ZS+148JBO7FJvG6L/oD7LaIzjBAPMWB2TLARdbc/ShtzxMY8l58fxDoOs8gZXcpLPHpdgEALIJWi";
+ sout << "GHRB0U1osylCUfS64GUzB/mVA4677H/n31R1WaghcDrZRgZs/aeyy8DIO8fprsM8MwamVSDoXU4y";
+ sout << "YNyDupAJA6GKeYC98bCgZ64TdmXX3pDp++8TOyDJ0VDNrp6LfdqOaMNZBoG4G8KduFcUssKyGs8D";
+ sout << "mGZ49omOt6rvrlZpGgBxi6afE/7tg4ac2hHd368gOLuPyLY8UKBEqhRGG4POlbg+v4AEegzLQdmx";
+ sout << "GrtxnwcojpDFU3k+HPD7wQwv6dSpzCGJy3d0682y/x3muJLZ/bQaZUV5yj2SCfgSIaIZcmVW6YFE";
+ sout << "dB6GXs8kxAZgfLLPT49+5jIL2XXkcmK/LK4dY3ah3zGKd/Gl+NUxbZmgvOzG2LFmPvp1Mr73WZ4L";
+ sout << "q/K4XId/B35/LLnT1bboTQ0L4BaJR0r8uURHEUO2fo9HtByhvRmaDXy/+EAxT01G0sjy4Nhwd0oj";
+ sout << "AZrryg3KSdCTwUoi/CMme7l7udmRxUMSg+L6+pTXgONqyBNDwkksQ5aw5YP0q02WH8pd4kVHHLkc";
+ sout << "oVv3diXYApXi3VBhtVmpyjAm9xy2SPL/iaoitL7/0d9nIlPymzGh3Ko+ghijUD0Ft1CJHr2pn4QI";
+ sout << "zWL0rF8DTAP2TcD1dl/I1VNW6e+GLtWgI5+/iDElGyrELfamWyu63q14Yp/Uk/PrMTsxRxZBzZ6j";
+ sout << "whO3UnC70N0EWPc5M1q2HdWuUadl9tbXFj4eX5F2OABfnMgQHDo9F+pux+TxpCZcNFWHsd3frjF4";
+ sout << "HEg6qkqxyaqgZLUM7e90wLxYi2/XV59BWFuZC6kVAkBCNWMZm+jisGYdf+kLWVrbZ091mYTllIpt";
+ sout << "qP2NaF948T+/rfiT1TbhmAGwj3NI4dd3azqqJuobPSjj9pdK9JEKhyC2QlkZ4HgRqyKTH0xPA1Bo";
+ sout << "mBCwzYhubIB2Ro/oxS152TwESUPDvZXTsDbmJXGsdCrrmfQy4NCXNQEDQrjrRY40qXKi9Cxr7wjn";
+ sout << "W6NrSshxpsV1NwlSGT1Omn7RUMTpe1JaKLINxTUJCCPOuNCAgbkmPfB2L/vzOq/PJ3/EGbolcvCc";
+ sout << "b9zvJpUeKfNOK9oqBZ2dZQqgGAD70uTPitKu0/5pA8I+14sLkIfVpVAiJI/54Jl7cz9lhMQ/X6oh";
+ sout << "ILDtsAHQjcDXP9BCfByca38PJy1k56vOjfg6Tuc+0hPcDkhWobXqi5xhJaS8WftAOhqwiYZKsimZ";
+ sout << "yFuYCz0EeT8EGFa4APSgjPSmsV7jCpOWoY0RtUYvhNMLSmFuwOVOiUHLrlsb8gFXQ9nmw/K8hxHc";
+ sout << "9kP96Rx3f+y79BSrhUcQyUrCSV50Dkvq07wfcAEvz2dzSKvz6zTDIKUFxfz6ejsAvrcx/7UgP0i+";
+ sout << "1rtTnLXH/Qy6rlJAkxXknkQQ6YG8egWa0mX9Wh7RxpTIBHPaUU8gnIxo5/RZEPOqrF9DtYoJMOkB";
+ sout << "T6davyvC84bSPrk5QW0DMqzZtHDDuzqiRYZt9PfEvURu7iI0IJxcVWYQcXJL/ZCp2GVS8689pmYU";
+ sout << "zIse1tFwwd64kJJfyBqN/vW9A4aI/PutNu14PwgPVr+NeH8wQk1L6MhOvcnJwYDWm504takuedAe";
+ sout << "hB3lePRnMEBawImStuD73hcZV1KzTKFvu+ebNR8414Wj6gVJYfPvHFv333u0ROqJ4/Mhb8occomU";
+ sout << "iedo9jM8ZJCMNbPF2Vxxgg0ahEwTKyq49Qht9fhwUTm6pMQKUXOWU/rD3x2BwrKQ7NZoOUqTlfGI";
+ sout << "/czGxGeP9PqmLcleyMhHuC4GdlvI+ulMTlngO760teQeuF7EYqbwwuwqtSV3E/pbXm15OFGSr+Jo";
+ sout << "FrOdb5/WWAQ7OZPwdgYiWxbLELUCjNu22PoVAN+PlAYShXG/qR2lO6I8Mh6TXZ2Oo1tP68lGp83c";
+ sout << "zpgoFpNuaQzLCWIjC4ka5v5k+Y06crZnzCzxaPsanjgOtAlQ+BOykH38ErkDlM4M2SGs8IAZXW0R";
+ sout << "zHgZygNho43FDRQHPiYxFtGV2ktVmtGNTG9YtWQsBcnd/T8xtUP8DI3tR3nCg7Q9esbsZOgbzCIL";
+ sout << "bbtn92I2iiTkjuPSuuYoWPzVS+hi4dB/BQWeHtnwdNAmqU8IAJki4jNkjh/6NfTgVepkT2Nc58pT";
+ sout << "V2DzfqFrhOW+whXsJIFkMSW0dXp5TCuJBbXTIiChMjXI3c+Dzes1/6CF7l2lFA9Ol+AiPwjPvhhQ";
+ sout << "/9WCwWzfsmp3+w+9nLLt5CtWRePIB4LC4nugM985fxf10qYES/K0vxKR1W/Ox90s3D4aG57SHpsX";
+ sout << "8frJ3HU/ouV73ZOtCg/lOfdocNCQa8KsoKhy0T7tDnWoKJvk0tT0RhFWltVQHV+sVteGXOImaxcZ";
+ sout << "MXDU6mnRzp5PEkQnNnBDeynPvs0vWfVSYpzQnO1tyIM2YYwfyf4vdn82ikk/Bq7onvdohQH02/jU";
+ sout << "VQjCfYj7X93of1bK2ja4vdEtcFpH34YTnSyZ7Rc+Np8BjO915a5XsUagea3ZHQbC9bWM75QKp9f3";
+ sout << "HuDpEn4tNg6RTcwWf8s2G2vZCfIDkH0geal//39BsVC9LqIyEF6urWV10NerZXv8fumNmX/5JraN";
+ sout << "aZ4GGdnhHPnryfHkHG62ESpmUCBO+85oDXrZcI1riSFKCYP1eBn/KCWkjMf0oV/pYljo1KrZgQay";
+ sout << "6vXXMdX3Ur6UUBO8iFht7ETaE5BeQ+3CZQEe3tdz1Z87V8rI1Qe0Mkq40CwniIxvk86JigLkt1Yo";
+ sout << "iQBWhcgJXCdVPSB1XpOdtZjgMPQNJqJtcbu3SDHsWQ3TWA6BDtVbHH7xY7CPVN2si2K8B8/Czxmq";
+ sout << "xptTLBIMfU0xaBZTXLSmRKNTEj9D0H+PwHvzPper0tQU5NEs0tZ5h6FZER1mgFzEmbZ6PUVgc79f";
+ sout << "ZnMaLmKpirOVc7HwFfLuwxAlvZRWtykLc8AvkZAp+bYGHERo3uHgdIKkeW82HQUKsUMN1VglX/YJ";
+ sout << "gT3Htmy3dEuYGNh+OKulcvAoRvLmTMYgACr/0ZNWoYjqAeilI7ZoqBWHYswLrvefHlIz6WAAHwKC";
+ sout << "xkbk1H0uWe9IRcf0DgEvWkXSrWRFHSS7uTePFxtLM7OQW/GmGfS3wt4/YLomS1enK1L4tH8fEPoL";
+ sout << "ZeZgjNYiMg4gQLm0+FyseqNjiUnQPkoFQg1jztPOZSxxZToCSdeUIawgr/3KKQ5+d2w3LEOQNAy+";
+ sout << "Vf+o5CjsfXpYJ8HVxpLsEC3xwVEzhpeY/04yUW2ygAfZqbsLARAAWtW2GcPN77MnGAmm46PeCSe/";
+ sout << "2QS7gM0ygtJewBCaxSfij+Xg4HQhXiKDGuwDYqUDmLInHskRXyVuuojyCbmuNmtyj+Uh+HsdFeOU";
+ sout << "fqTBHfEQzzAFVyORf14oEhGBwDuvMzLjciKSfLu6KcMCFo0TGgW3Niu7kDdbxOxg6Kwa4wiaWWk5";
+ sout << "mJpG6uhNFaDqpfVLY5D/o0Hn/QfMZwR/MzmiFjstPVEd9ZzuRvKKSDOM9Ult2lTU6z6Ci2b4unu/";
+ sout << "qfLIE8CjUAZrZnEFbC/KWx9I9zuUuoXS8elD9aG4if80uCyJ/tT4MXjx8iRsbvZ9B2MoFdO9gAUn";
+ sout << "qafo/IYR+5BOderTedEwe6FanPX4opAcpmpXM/cbD51QA9LCLyNtSoSPlwji/ZtYA97UUV6k5c47";
+ sout << "TNFB+QtUaW7/DpPZbxsU92jE19ztQ8VV5Ylm8v9RdvZDQ6T37KVEYu/z2bHX5L0r/bkEtQQ3gf/e";
+ sout << "TRfpy9j5f1lVMpN/UccOtsZfQpW1WvDcbAbOjcuPuuwNOudfzztYg8vjJNn0pMATK1+p4adLcxJ6";
+ sout << "uCh+zVgxAS9W+z/t5X3YTMnFDULRXqs8lsN6H/1t+gVBp1uG04BBDO8YMXZsUB8dLyb5BHbkjPiU";
+ sout << "c/zt703lv2U6So90+H+6hlnWDBvfLHI1W1C/RkelOCD8WVZ0rk/sgI9UYf+jk/SkIkZgbtVTB5S5";
+ sout << "gd5IXkexjN/kP372AvWgX1t+uKVmKRCrYlG8TZzDrjRD4BjS1M72OGwm3MDT6GPRcaGc+jtSGAxd";
+ sout << "eW+6BPofMujBNOBsrxKejRcLsddfOrGTBqP0Wc8B3cNOjEg/ITJ3LOu3FFA5TyLh0BvOEkuiD7Yf";
+ sout << "SpB8L9jDYG9nCcgFC19CA0Ji/Oc+aYFDMiDM4be+gG9QUdAxk7Yg13HdJirKRiOvJkf++ayrJBbZ";
+ sout << "xs8NEcd6lN3XoogW4gChg9mwlUNehzzjIl56wG01T+29FhD8Z8ebywHq0xEbP5aVGrEzwj4HF7kz";
+ sout << "erUrxlRZbmRgleNhldZDjqUpMcHyJzF+OxZ7W2AfpjJf5FaF3E4Rb9IG6vBtPhZbk3X41VpKx5Ih";
+ sout << "DedeX1BVHzRYAPt6JchdpOa+F0Y4Evke5XR97NecmGoYXrSkLrArSXDu2qqeQVx6XJ2Qginw4711";
+ sout << "fJwfVb8QMMRouBWEYz7k2LflgRG3c3XL1KP8npyz3yNw+6b3/K5JSq4hRFzXfsIWoLArSWgfboMa";
+ sout << "w/wIVdPgwPnUh3bfYKEXsQSenrazuqhPosJRMsX8qdZ1HB9PBwMKMHqDessyUX8u4kFMZwo1egfw";
+ sout << "7x8xkykzBeY8Iy0y6uOVpowXPgEPg9PZnquqW/r7qQAiqgoW/4oMU8DZtcfT9fTEN8BI0YPPNPFY";
+ sout << "1N02ocPS4kd5V8/bcTWkYmtCWIqlDtyPCEcEoBQbhneOdw6EmOmTqJVArAwydyN8TLsJq5A4WOf5";
+ sout << "wWO407yhoizBhRjGlqsp+LW9DlG4LHKLWyKCXb+iozkdD8Bh9RX3iYz4RWNSm552G1u8YQtZkeN6";
+ sout << "ISOxeylEI9OFkxvueA6u7juBnwpmt3sjcvCOLbbwwpc0JyWfAhiRdBfC7aXLjHm0NcwiULATnoJ8";
+ sout << "AMfNt/qzosP8LYQCvRYwJYShbskncZCd5amEmN4eStNaebKyFX++T+XC9EiG9FysF2VZdRf3OhzG";
+ sout << "dDBtLbkdALyBU3A6GhCueeZ4c5vEsHC8RMH5iJtYHXwLc0OWzUPC/DVlhVi82avRChrbJQ55qHza";
+ sout << "DBnr3I00eUsfGxijrqaQ4bfLLv6S5e4jP6C6waRbf4RY/Q6kTb/9oECBuXgayar3WIvLp9txdLEi";
+ sout << "dc0sokRgOMXjrheq/gZ+WHBm88SLRpecDwc4D0kGo7UQC/PIajFQ6b+rTJBKwYxeaaVpA2gaakqR";
+ sout << "BeQurXrdpAW7hWwBHjzWtxG/qKwO/w1+x/zAICr9N2/1Bex8yhnnIEcltD2qN5ykzqBzrKrbAzaT";
+ sout << "+6+ZV4ZTvaeeR9ELOBDYzkkUF6cgTWAZnwT0dKySp2C6FSPkpNseYrrgwg4Ddp49pN7ehwCE6V99";
+ sout << "ebgMYWGWqJkBJ7ID8NSMDFjOTvuWzz08QpaygfGSGgipHcuEKdHEe+waRMOMZSPVD7uDdV67805a";
+ sout << "67OTB9j4JiA9zhyXqx3STc/ICLFkSjZbl0VC5XAyVSrC0ssNFCVLp7OQ1REJYnj2bCPMpZCX7VHF";
+ sout << "RY8HRrA7V+X0HGBVPAcCrBn47lFvImWGBWGAWFO+TltuyKwvuiBOU1VfG1mxFCJJj/+dv7jIdDp5";
+ sout << "+VEE6pCWOJxzveqnJ31PvwrlGjxY8WondS64rNe2qnQ+grQCp69T4vHe7zuYfta+QyjtQxHiXSBV";
+ sout << "Zyu+R1M648V5jmGpQUT1bek5FLy5LoDDjkHITygysq4NzrytxR7Ncmgns3tdB9LV5zcHBG2I2gzc";
+ sout << "lHRBaE17Z3Lo6zdwTlGpfumcYid4Rlj7OgZIZTz3ogfaWU3OblHZ7Kqs3GGvctnHnaEtK9oqQQFP";
+ sout << "zEvAG1nL5qjUKB70e9YgDiUYKAC7xyWVoB0BCIjIXdJh8JlwXMRQDQwEwAeV8FAYKQffZB/2JEeR";
+ sout << "NnlaRxoQVuiNom9Dh7AokAEHuitMI+KD0n/bijH01pr9cxGoz9uimYQwc13mw+ZS0GsWeeEqYSWV";
+ sout << "kUsJVrisNxDeJMKW0hQuZmNTkjve2aX70+V9ouwgMlYQusDxVGugu0HNUQBgWbnE7CW9fh8GQV6d";
+ sout << "42/8i28kLjJ5wiiReXFbXDQ1XBz2lwyXXm4ITq5e3tqn/BDX5S6yQg5yKGl+2pISvDvZipWVNj0i";
+ sout << "vNeVWDITOZw8OAceKLZJ0WDSaV5W+F2Io89LI4mdzDdvOk/Ct9OpxFAacJ+H3Kx2S6b70hcdrx2u";
+ sout << "0Z+LB5tDRmxItNYMfz7br1fZKyAO8hUxeWN3EF6v5Kh8t0R7ZZaCEJgt19e/3sFnR8ssvG3yrdXG";
+ sout << "RT6qvp2JAUyP53dm4Z8+6e/k7YA7WR1sv7d7omc56Q3LXhpvmJJPbAJ/qBXHog0pO1ypN8szYVUo";
+ sout << "MbTlTlz0TECPbqdThQFSps4oXTeCcR+5fj7SR2K3FVjbxHfWhrUfw35Nh7nPfzyDHwfmX+CMdwoj";
+ sout << "oosswTADZSwvEh46PBffUydxEG7X70qyn8VhwtZGHTFNTjjncB9AXeEpYpqxo9ZzOXcCM4JwI+jC";
+ sout << "eDsx+HOZdie9keAE27XUcvKhNYv+G5Hq3rAVNe2GRgDdt47Ysz9vECFOBxO4RVAl0jr3TmfKoqmR";
+ sout << "3WBN07KGMf2q1osu+RZ0I2wAQBwQOFG4c7IjxqEh1tlBsFUQBE207K8zhrYyURXulKfdHL+iWMFs";
+ sout << "LxDsVCo3kG1L1WRBXynobD9DGvbx3EJf8eIgrKuHSEF46SHUWs6HjACerAjryYxvgHUoUez5cD88";
+ sout << "TcQwFBS662AAs2ZAp/uA2Y5Pc+mnluC+5HYwxYI1hh9TVEQPXRPLccrfkitZYn/kV1nf7AzK8lEm";
+ sout << "lavQkdl8e0rKXTunMcuayFgtPCyqiUPn16MkaQBfaNM9B+gw/fFeRctuRq46xbF0fTzBS8iQBkoB";
+ sout << "jTV7UmXA0Ysi+iVFAod6e4o9eyW/WEhGUCrAWefyUqbytPJDH7/1VnTdlN0bpeXXSyqWgTpsWYay";
+ sout << "GhdYTsPDdfTSzkEU/0wdKm/w8Mjcf3+ZcVNJhPtJpEXzoFsSL0VVGym0kDsJooOizfSqJPyczVD/";
+ sout << "b3vN7al2kuDcNZjVyWiZCfPyED1UQTSQFVZBygAfmjg/spVyg3zbiW6BjcDuxOxOHmBGXuE2XOjM";
+ sout << "Qa4Qcw7ioTk8dw1VobafnWiGN2S01drTqLFry6ZOG2es7IejQh6HsGig3Iis3xAxWWGS/Wazhw8v";
+ sout << "M3OQzSmbW7j0L0KcLOMtSxiYo5LHXxhti/pAaI0VZSLkQYgyMD/mBdbs0B8i3diXoAM/BzqfoU7L";
+ sout << "7SNslWLwXXscPrJsCwFqe2UiahyQyIvf+qXqBhKuFnIHVt0cF5cFAeEh1kYzAd0JDbi/LB2hYsVX";
+ sout << "rosavz3xN715cRmjpjuQvlIDUGfmmJPPvUIbbXiT6q+TrDdQaPw56G7vz5llNd17QS7UUZymWjY8";
+ sout << "p/8bq6MieVzWeOmiBPbmWRtmZ4GPTc0bmau8FH77JxwgrkdH/7jDL6uhcd3grTrsm8X8ZX9FGCZy";
+ sout << "QZQ52pU3tWbfy7KsWnBZjkRwzZVRY3EVjg9V1PMQYKAqps7mU0LrDBPwrgHW0c9EvuYa9SIShUgX";
+ sout << "JtiWswoJVbbJXltdhclBmvuDsyK/MfLJrSE7qnPOD009hILCojdSCmqdaFgpmw88EiYAVDqwcFzw";
+ sout << "EiDdvqg91OFplvWoGEQx5wsjOKaQst1Z1LSc/C1ZZH+mmBMgHfYmr3ZiX/eYUjGzYpdKGa00v8gk";
+ sout << "dDEkUCLQ1I0ayiaz9cMPWkZ3avnYb7TdUKa3iwERVVFEJWfhpWe/4KP5qBL3Ih8qLicgtdkt6FOM";
+ sout << "ymOpNQmfe2X+P3U6042OQPIU1vmig0euGeKQmsE4K3NoSOEy2QheNv/2GSSNRzc925zhjAk1zxXQ";
+ sout << "04KKCDA4HJKFK0tly5P6KjSVTiQXZ6e+IWH9CQfAJWumCVtHNCulD2epjCPlTEjHH332gl5c7h7u";
+ sout << "eJRVbsJKZm9z1TgFN5w1iDWI1cBLZipzM0BznWEvEoYNixyZot+Yfm6IC0SZi8DA7c4Ngn9V0ydg";
+ sout << "c8JNp+A7T7LqB/LWgma/E4+wv/cdoXiKi3mwg71ZQRM1d5oWknFfm8hq0KNOl2RsSOYO92PIF8Lp";
+ sout << "kG2KPK2nBU7VkbNtaJwjmJpadvkAEPPpCbYDdRjrvKaSw9yehsWKfLVuCLQHlO7uJmGe2LHWKojj";
+ sout << "BtYYau7+Vfyjj8K4eNqABvny8hj8zV0HyToirIxVpBmH/6/8p2imqYx8hc7yBE84LSNCEac3UJYf";
+ sout << "8sxZluHs/ZD3t6s9jVisgtx67iv8u3A4toAOlYjBqj/RXVQRAb30MzAFy1YCdxedXIvKBa/jXMwk";
+ sout << "0DsQ+YcRsnKmnGQ2yOxNVfiJZIg+KqN2Ork0y2QNEy7aTv25OEUM8U2wKaRkiyNYh9VmvZpYk5xM";
+ sout << "OPOsReX2FOJOFTCUGZ+w4Lp13OI+QLc9TBllim4bhtbBscCMN78WOpnUccS17gXHnqtpK1Nx7R3N";
+ sout << "dwpVTYBCxVVUSCHAREPvmPlHj7c7nnIfO5LRxuGxrg0TfCBUwCNiJvuUpiuqNyJha+eJCPS+SlXy";
+ sout << "BN9m4jvEM4KGbkGSwelXon6LWOKq3cya+8xOqQ6KliFrdQaNk0Tdg1Mi2oRjBlqjyb55oXYNeRgT";
+ sout << "wQ8wAPqY5TM/z/Zk9ELGeQz7JjS/ru5Q1LGNUU3tL0AjqUdU6adovph7CC0/g8lX/NqYg1oRT575";
+ sout << "7m4ihHqVTIN3pMkB8TJBZNAa3i1rlom4qcOFGdjTi4WTRd9r3ll0y4PB7V81SddpOk8RPTOIDKR8";
+ sout << "pgIa6xFOQ/GTIaB8i7uLax+IMs4dIq+gzpxE9g35BP7J5cbIYJJeMFIm92l7wJB93ANTT84yQABm";
+ sout << "oBnlfu/fjcbGvfHw41CTRpMWraO5d9nABxuIX4wFKEzxse0iAVsJA21RbpdBtIHLBcbYKCh1bUW/";
+ sout << "hRSk1yislMs9Mtjx/VSUn50NAompaF3NLpG10/hGd7pp1co4wZm7+fiW6sIPBCqLtkg6A1+qGUxG";
+ sout << "nUU6NqUvmyWo5B4Re3mDggASRSIFdD8ckv7LlAbyDgVyIxsBrf80ISQN7jRDDd7MjkyAJ5CRlrUu";
+ sout << "vFCwCo11+7E/Yfk2dadQJTjGvuyyDXW72l4ze1PuBrjnql0vBe7CCPZKgKJWWDzbGIEKGbBqDfBV";
+ sout << "EVA0NrVpWYBu3uhj8sHi+cAMwiXy2T1ar59Cz8bvuIA+9egegV19YAay92S86BsJbhVfb/THW/G9";
+ sout << "sbTgF3MWTUgvDFmaJzYn7mkR/2xsBn6UQsLYPZPxOQ4VaEUzAKFSP0zwGZfMYE/REnDIbdOR1ai+";
+ sout << "zrA5gWXmbLlr09hkM7tIn2Tw/+X7zEzZ66oDyjDs6g4a+CBt8OtOLi7Ga/dO1DP2y4n1YPAsGADW";
+ sout << "Jf24rVZGv8IWyGmlYJZWX2tBKV++HDUTxnmbsDSI7Jy3r1UXNWskyDC9zYWetsp8UxAVRYBDhwfv";
+ sout << "fqlchxnmTpm6ozULqYI6ExncbbbK1IBSbY2A+VBUrEqdH7Rxg45S0HKRbvB2jAkksOyhbXUwngM8";
+ sout << "6uG1TUcRdnxLvZniKptRcYJthyx0HDHCzLoTT1AlyTB9kvrktDArOUa1zmF2hCZAYP1swqb+zBjP";
+ sout << "CHd4aExlUf/UokEBduXCWomJLGXsJpVG1EVz6kTZb2Twdt5x5vZJK3O/LCBKgZNMuS7/xLFZpvsB";
+ sout << "cUA0MoO577pa7SECGSrUrSblkliCMKmIECRrQoEJaNKCs1r991ptT9aWnLSbl6HTTWXKcceN0vXQ";
+ sout << "tKwTpiM5nhBmea6eOQn5JP5oeytyDul1Jd0SHR4WILgQvYQSrqdyQOf5zDBQHe2/EsinsfonJy50";
+ sout << "4M4l2ArPeWEjJKtQBpgQaLF7CRGTjnv0TPJ1Xjo2Cy2OFvWpkA1ZjK3Q/I5PJsMrlkYkTSgQql/k";
+ sout << "HwmLiFL0KGdVfo6F1HT5J/5ZxCDOWhO+DmvLXtvTQSEE2asJVZWDo1rpKOq6H3AhObvUgp8AtPec";
+ sout << "+KXWiyzTH9YYq5RFU4JE8a+lt8+bGtYHxGOE6AuVgQsvUvAJ2kgqgi+kLYF19FZjeYbvuxzI3t7j";
+ sout << "+qCO3v59HfjZWfQ7FOJ/ciLVAAEM65r1sX1JpZRodNnZUvbxT7QoVOm0LwjW3qw2VxZCbVU/jPkc";
+ sout << "XQ+8lxiZpHO9mSxBXvQkwRW4K9MZ9dBl0oYdvq8IK33SkyTcaHBqlp2oszKYwDFyLo2C9uiE2BSE";
+ sout << "gj9UpPG4M7XOUjUMJ3gs7aitPsTdI5oHG1xklLOTvyWYIJ6vk2/uBrMImGSExPSMMGw0NEbHFEX0";
+ sout << "Fh4Td6+cO6QgCQeobzV6Hxsi+HTk5mtAbMGXiIKi7bAiSC3tkoiCeDfBxzqROYgSVQU9fW8PeyW9";
+ sout << "xEmwZSABCNRog1xAwHaOuosBqYDMidyh7F5ID9XeYHH0qaMxLmzYkZ0SAl1qECFzJ7wUSiwRzvzC";
+ sout << "VfatrCs84cDUKQIpr82LPouxQBeVFQAlj0Plcuo6CxrJgVR8q6ciC8eCnZ5WaF27pKHrfPc7ZjdL";
+ sout << "h4AwTLGOpUg0N0/aUJxvFGOVbm/g8pH7duFC3+ycmYrOXZPbhz2qthsxf894+lkU6TfmN3OOoTwG";
+ sout << "EXrda/bR2NQNvqJWBRTv//VdXIr1RmlNMDcz2d9lL3rwF9Gq5EupgYIfe1FhiiqDjyrTzh7fw2Qw";
+ sout << "P5T1SfKa5Ww9mGxa1psmYlJ/IzOQlSlSzSHxMVvD7c6UDpWwKWFlzzRPjl3WPS9Dmzk3tzUo2ZYx";
+ sout << "RdP7RBZg7Cveb3lkIn+gU5XII4cD79YzKPesFWpWR1ejkLVGW1YQB7jtRaJmoz8T81UTuqhxhBt4";
+ sout << "NswM1L0VtG8asAidWd9nT3Y/WWI2ydJ9YgiepKeMdtpt8aYqoutZUFzPKGhdZsZIYikb9r1Svrd3";
+ sout << "MAyceFX5RIKLVcyJJC9n1QlJVSXz2e47rrj99prVvMN4ROB4hpORryHzGuPMMhpn21ZywDk1aH9D";
+ sout << "0WGZq8Nm4YZEjneUtkkW0nXcF0nxJ0wTFBmOrXEgjpk6jzfQ+9+b803WlwtqRUJ6S6FYirRyTv6X";
+ sout << "r8xCqyXgNioZycZSgjxzLcoFqq/65u935jK7BEMtBGWa4zQcwq38IcmZu9xfCrQLCfHpIfydjxBs";
+ sout << "8PrNV84ccG5Yfs4zN6oaLOahJBukoFHUZLLM+67oALQzsStEXS+HQhWMJCRj5M/8/SwoxZM0ACfQ";
+ sout << "XY5G7OnF21/5NwpsUoYboT1wNFT2r8TKrFOx9bEIbKV8xTUgdrnZwbKGPemsypnnsFHoA3BwIWKx";
+ sout << "+w0vNWVqK2vsbL/pTiZiov1lxvfKFV25Q9uylplUaSnzYuaDQFLOPBFr8nhcmnfZA8r4ljcjMNw6";
+ sout << "0AUHO03MyUDfu0BUVYQEyGXpEvwnI5JmVOI/y3/TqnVnReemG4D+diuwm2Q5UgyY8KyykxJam4Zv";
+ sout << "W+Tn3DHT0emJNWv3imi5itW0rUIaTpt/9a0LvqIWi0Q1OOvrxGgUcrsRzfUVi+ru+tX9YZrBOYXe";
+ sout << "Ut+KwUCvITG88r9m7o8aovNyH9V7bC4axTrIxmtpRiRP5e0Z/IiH6b560Z9ixQ0MRv5SUn/lhX/l";
+ sout << "AP2rTsqxLmt2mWz9b3GqtEO0iW8XisORLiHzupEX9jRbcqShrBoG9bu+4DUO9hGdemq5lX1782dh";
+ sout << "FeZOjhIE0/8JttTmz7EZwzp7SeHup1yqlForPR0hhkFzfOOQk5CwfYR1tD+0ImdmbL/QyE/TRsnP";
+ sout << "NoDB0NVyXcmlJQzRH+s/dK9kJjHFn4FAUJ7Eaw5xi8/O5VokSz3gfwwPHbKFexrY18fDiyeZ404M";
+ sout << "cAGYSGyYzxkpa1HwuZE7UzUNlyzpuja8RR/8XaUv1XRK1lIkmDFA9cKT81vtXJBY8/04Jh2OsPBT";
+ sout << "mFRnFF6+vItwlsmOaF/WUstZg2XxP+MePh2jGkKj3b4c4Vw9YwzD0/xNlkCk22jnN60wtM5WHiLT";
+ sout << "Opu4ZnP6/woOP+7uOOHdjEJvo2d/v3TOmNmfx38TBCBdAbRpMzy4XDZdsWpP7l0DN9/OnbErxD30";
+ sout << "2sECWhkxKygkNxeq+CoeiS6Zl3mzEKkQ9q/XHccCioWgiODFt6+Wg9MtPHGaR1cnaDDBBp4YQWJG";
+ sout << "c0D1Urc9H4Tpn0lmJkx0p09nBoMHGIBUKmHuhMOnuEuZp9wDYSQ6UnjtLy/+QJcCB8QDvTCg9mhu";
+ sout << "W2HNZPqa9DFD2tmV5e3+pTojHI4O0tOwA5B2OYmqFzdvMWLuFn0uMHOu5285KE0ZDykOOv4Nupq9";
+ sout << "z/rjTtrxInMTU+a7hynQ7Ra75F5nCnEefKXL2lgD90IkuNDpmxHBX5OgOYb9RUwqcVGAFSZq1jYm";
+ sout << "rg7sxuDJu/honmXsDohqfo3/vBVca9U28wnCJ2IsuJXRQwzBLOJAz44ijj6+Bx4R/7U3IbN6nSNW";
+ sout << "5fXiqV1JD/r3AMAf6THZKnYy88mv6kMzt0ZXPd4PlyHCYhLTIzw3AZP36xPjEWBRP8FGaJw5Pm02";
+ sout << "Kh91uM2QdF2jowNV/Ago7rHS09R3B/aWjmGdVTyhCjdogPtrhVHdaq33OftcCuCF7q/wZ8kL56HD";
+ sout << "2T2BxxmsZY0XqSlU3757R/VoeGdk7XjAGbpOeyxn+yrP8/stbyIwxq8PSv6YhRN2c3+H9AYka8Lr";
+ sout << "Vrb16X3aEEP49jxNwEOL4hM2rcgS7f3uayVLwsuviWxiUcQdWcmjTVgTos/11pdXpSaPqSAvpAMs";
+ sout << "m79R8fdq1V5+kmZZ8UZKUdQwjJxeg83dCTDpYRTXAdSdKDLXeD4Kkvd2ebKOGb+1j345x4HUnuef";
+ sout << "ijTom/Xz3fEw1jLgqZd/nVeUCBYcXpozL50LwzYDZzjxERSVCMCgmt6VJBgpvwFBXyAYJql5972d";
+ sout << "vIP6cFU9K1AGAUipBxgaVR/5NBFTIjgo2CEOJulAQkwWXE1TOUXHF6hD+Tp9Zf3nZRmTBU2OXA/U";
+ sout << "8M5Q6nyDRaV/qRdB3GMrYPHZIXcy+uh9xI6OYDWSfv6uZxGfKDzN2JjMNLuK5CEIu4hiLj3JHC5o";
+ sout << "vHJ77Vimbm7wa/cTYx4ztSvSSFwSXy82nSd3o6d3Z7vL4FlzDjxkW+6AfP+8SQLvC8yiKiGr6U9/";
+ sout << "cqFdi3xOg3Ska+3F1sn/OIrDZVLkP5eyqo2aDC++WnVv4Aig14Xf7lgD/nOV16X4sn6BmpIysM/i";
+ sout << "Cjqh7AuHgDkB9kgr7jQXFeGAhdGCNby93jgIRBdWTY61JRX6Isek98cdoL1XqCheIxKckNMmljnr";
+ sout << "lHL4Fi8okkonI43IKV+8NJ5eH/JmUEtrcOuaB9M4rEI1NsEQK72dIWyuuIYyocaeA87R2dt2biia";
+ sout << "SsZLV5jE2O9YE/AWY5fDGAH9PA40TD8bC6bxgaik/QFegoo4tRw2yR+GwtUh9Utsu9LUofRnq6KZ";
+ sout << "hq+3Bqzu3U9grOYNU9MUxNo5jZS52U28A8NSuHBDeS2870sdUvEM2RftmNusMnCDR1bw/tru/i33";
+ sout << "iYIUoaX6EPN2UOJMm1TGqIOMs4QWkDTXYAsIdpBgq2nMfXVDb5y/nqpmQH2eJCgz/7Ly0VtZISf9";
+ sout << "mkDrrok3cIEMKMgvM5X/dA4F6Bv929nkXqqSi6AtUER/jJJARkpAJHJTj0IxUgY2unVx4KP4OuL5";
+ sout << "b1HeIkWdlln4V64OPJnBiKl17p4pX5BGtEXngsMR3kdDBJwliLv6ciLvPKybgggvLSQbQ/vqnZdh";
+ sout << "1x1GlnvrpKfoLFH1S6f4HiDicapOP3wKOp8ECyNZp2IAVbRmmPhjKjPry1EdgySgpXFZUHVEHdR3";
+ sout << "R7cGnH1AGhZinYuoN74cFsNooe+LxT74F4KWSW08+S659M7862DADzaxlHAbd9BL79Tu8RGS5CUB";
+ sout << "i2ATQ4rUgRvzC9OGcwCpOAfbp33+jqLmAWRllB5f5uBJuO2nG7OiKHV9jeunOyHxNnp4UTpblCBW";
+ sout << "yKx4EVA7p5T7qH8krIftUpBbUx38XhCtPMcsIZIMp2w+nmrlZeHyPGVGFrA+Z1+VjmRtiwGCghXG";
+ sout << "JjZ9j+HPIniiHK78MNAPoRlt4G5mtBopMopDn/1cdVroLuJkxX1CpCOu3MmkQQghl/km3jphiehr";
+ sout << "Bj/E9IZI3/LesK3Esv1J5VJCFl+Eyn8i90S2ij6sBBPF+eqY7TAMYAr7MK9mFtfM9orEpIJTvG+U";
+ sout << "Uun4ibgHRw0t6UQ4nnvZKHL4qmZXZB4clGgvF2Sl1hj3FpUhQLr4YWJbMeBVddvi8Ifsl/KpN/J8";
+ sout << "k0iL9nhnyDfLBNy9iT6Fnyrt6wKEJKAmvMTUS/U5YZ3CFAgWEmqI04ESYkZ1F/pVjwgfDtwjbkPV";
+ sout << "Vr+9zaxAa2gT5yZZ8m6CqMJJ+Hr1857AgVw+msikx4c3G0zFZpUEv4o3PiprOr0t0bGSzr4rWnLb";
+ sout << "rPNfMq0mQI2bWIMeZDtiAeMLOwrGdlnBUekp8NydJeL1XNAMMJjHKvNEnopNqkZ9Gt1Kmvl3Eqcx";
+ sout << "TuwbsX2ew5yNJOUD7T9q4lYRdKSNs88fdTFzE6heGTV9UGs6z0PMP2HoLXSShBhvPAIX3t92KK+9";
+ sout << "8986KZvRZ/QVN93Aqt1mczK4SEgo2X6Tm5814X6WcCWiZpH8CcwQjW23psQyguC4PyLhGJo92XJY";
+ sout << "GRVVisGEARz1qjm/GY/jW9kE6alXImoLE+F0opY6QTQXAo+KKleXAWmMEUs972E/aFQtmR2V2mxo";
+ sout << "Ga6EtSNXobw7jP+kMkYoSiav9pYhII3t/v/badrNBhKksNt56Zx6BYFOnfhPrwGkR+YoRz2+x+gR";
+ sout << "GMHLwRFe8lMPR584WNMrC8r0r3i+qrp2TeG5vFM+VKwWLSBUoG+bgZShS7ITSJiSjkuDMpa7jy3w";
+ sout << "1s1AIgtNyvxrKPTKr6C2FjVuhDkpSInVX+Cdr7utecsExirDn7uyb46oI46oNbsD/6HW3sIkvrVH";
+ sout << "xWY1bwvcKlT1+aFWKYtdZEOk8JE60pwiat+MpxgOk9TC5EUdJvcxOP8M8zQsIuQhWlkgNhsm3G7X";
+ sout << "1xWL302yhLrl66DlkKHjCH8+ee7/RCRm3l4nS62XeVQ9ZlzEIUbPP1tnnzg2aFi/VxMxk+EfF23P";
+ sout << "auIEXL5jALZVnr1D93LlSiqGwKoAq7s1bblpIGyxcXOsuaZ8Ls/J7FsxcLD8BQiK9v3wotEzaOOD";
+ sout << "jSfsH5DDUQ/nIdlBwZk1J8aOTSIo7DZ1wZRBSz66ptR9sSqcwrXvFndHulLtva04DfNDPzPNaCoH";
+ sout << "cAjxsOZRKl+ZPi9X/qsVzCr6DijJYeWT+UqbhTOGX/Vrl3M3Mt7q4Pcs0vcdXH+SUV8YlF/zmaD/";
+ sout << "xxZcFz5DPz8NRAculqklupW4Wc1D421TC78Kka2NTJKLzivgisWROR9c6g4ml5cqbrizhraRbvXP";
+ sout << "ilWQ0PSOq+uyJqeo+1JOSH3KWoyGWfOfGxlRffmvsOOuY2oblonsSCz6sYKySsnKeTezRcSP9PFg";
+ sout << "6EGCjFg1WodhnA/KWixJtQ8QsfhaRDoIYklDBWlPBEKxgvHi3yVF3vTyakoY/VR82B8UWfzxW+gL";
+ sout << "ldM+gn3H/DTpzR8/h7xBK2yoOw2Gpv2TcEB5oMtPsXsN58QdoqAsgBcHI/GNdfqaQDK0lt+npV2M";
+ sout << "8NAnFcK3r/wdAJ6OwJYtXPEIbhHxRkEY7GLUT1IwLeV2PIV8gy3Eh03ULZDcmii5xO/MsPgrUSKM";
+ sout << "Z9s+JmBmJhQISUjumZ2SBkAU5V3SG3T8c8EaeN/9yNYMFmSXX/jAjlAViNK8bCfnPUkrmnTAMaMv";
+ sout << "blW6tw1yhXV1WSAe075/Zl9slrZhcCJHYX+mZ1yKDpt7x/QxayGmwypWDo9ukTnE66UKiZ0udTiE";
+ sout << "awN5uFe+qvj+zZLrnM4Bf4ZjMjKzjd67W8UaxMJCVtx2SHeNC7Ffjo9xN4bicDUnpqTt5phMV+hX";
+ sout << "lBRiuUYA067qUb58MbkIE02F36n/ioHNYj15Eteh9wv1IMIpMLz/1UGm6+97m8xIAKH5B691KOsD";
+ sout << "k1ovwfjzGXmFX12KsdSplPSmH9UCo6CsuJmFkhAkYd5vCw99+JVu7RNqhJGF4h6Lg22AqisjfHde";
+ sout << "5CFUks0DocGrgDw3GcmVMFAV1Ix29iekfAgEJcOphJBGJ3FvwRmGkflrIJW5x2sPMaRHeU8Fkzso";
+ sout << "o/IF8537YleWU45S1m+GXNdomkGIFVXZCKwiQ8bRr1MmzMcs+0EkU0r6ei7yxi+6LlKfp2NQOv/Q";
+ sout << "owQ74v4YHvyNJcI+YLLMHaZySYlxIjL33H6b8vjrzsDntYXii8lVRRXhykAiLldwH+89cFk2tbeO";
+ sout << "am9kZeEIlCX889URxsw58TRlwd0Lfag6IhNO/hHpi652lgEYcZu5NPaDQPRRC6vkl/+ocoMmTDhX";
+ sout << "uGhphdczLsCBpIejuQVqI1dhrnko128Jzl6yIe6mpuhzRT1ReFL80F6VRjegnDa67Srztleu6qzi";
+ sout << "OUgU3EDRfE8IJ4i97SsnTFJR7Eyou2/GP1U+ai1U7JTknHbg7fNeD5wjTIyQVE31oOR2WTNELWa0";
+ sout << "/GWO3P6Em3J7vZJifeKTGYWf1124uh+oBYgv+j3vUuX01i1P9kmeUP0nmVgzWs6AGIvbIp6vF+gV";
+ sout << "Uc+8TvNYH+0f19q/lPOBnBT46URHpJ9EZxodLin8K5aTCB3sLjpXiqQDPD9fbMzL2r0s1Z0YPWgH";
+ sout << "Mllgxh7l6bIhAmymXIUX3+Iw9rnW/6ODO1huBbvgZOAUrLMnTA9j54trOL/n2DxwAU7JJ/fCeH5u";
+ sout << "PKH6nM9l/PvCTslH7ahvT/uqGD3/++3FeHok90oU0QxyDVbsMDX+ksj5Hn15ZQZQtYqToONe/OI2";
+ sout << "F+xJHU1CWejRZDJgDICcBYQSI9e6crc1vdf0y7aUeNRthj2xLC0zbtfy7PaIEy/R2E93I3jOoAq2";
+ sout << "Mz+0icoUb3Hckf2St+HqfYPL59yCrEyqoFONN5XMArC/MTYpMn/XQfCQL2X9xV9T7WaScs9TAqQ2";
+ sout << "/qZrrcfRxUaPCVIBtG5V+of/vfYoZzwQrLTioTohrX6SC0WvxS05UlbYo9sDwrchmIxPmrf5vI10";
+ sout << "T4Sc1b7sRfiijqZA+YTnYyBPG5c2qqsmSXLmtctpzUn94XPhS76qLj6uQEcoaYTOwFK2dVTvFpfu";
+ sout << "zlRNDzCCfzXc64lA9oKwEhn2tTH4ddClyYobly+2HH3xz3NQMrLxeYXblIKBFJzn6IlY26a+77qM";
+ sout << "srMzEwRmnzUojuRBO6n7BxGFfP1JuP4FhIpKLTgA1Ql5mf7Z9LrsM2lORzMuO/og7LvjrO5jVs/w";
+ sout << "+ZKmQzSzNBjEgq09UDk3gMG5OV38SgLXQCkUnCUThGU6En5OTQkSAF0IDQ9Bmmyadqp/+V6WiViv";
+ sout << "Py/WYbFDSAb95ME/J9YGfTg9VWncLDEU4otj6Milbr8OLjlR5YbbGkieo2I4jYfuKHR+/C1uXVWk";
+ sout << "HEOZBv2lYRKteXx18m8DLOZ1PO0PC20KRwcep1QSmw8SH0NJ2Gj5L2WvkpfCSwKYJOJECrAc2PcI";
+ sout << "Bzf363+QgmckMylMuHJ0Kf6AAP2xFtKYw3qM7tTUjt14QkC1Bi0l7ViNGt1vvyTq8erWFJYveVwm";
+ sout << "lJQMmyvrHq5g+hbj9ftT8cPxbKJ/EPYGKrtrM8AGxTxrtA0FHqmzjPTm9rHXhGYfm3ALWw+frbVc";
+ sout << "d06D+iiyRXpTTDZoEcbMaZYCSyHI1QQmTR9Wfr64r6XSk+/jDLNlL/Syp/Hzs5dK3P2YR/UjTB3I";
+ sout << "8U70K/yeV0v9rDUI41COSK/akYmpybimqmexTop7zqNUIFOqFHbH7TCqRxfq76DFebeHbE1Nq7l9";
+ sout << "p8HRuOuavv9wKGBJx5BoYl0h+ujVfqWIJHas9QuCbJK8Z2eNht/j/FFSvdlPfWIrRu20Anay6EHN";
+ sout << "bEDl6/voathXXz8Rtc+/xkb4A2D0+WwXHrYh0onectahnSjI6sK35wtf8UF653KENWSOVFiFAwjr";
+ sout << "XHV4YZb+VWi5jQ7470jOdd+6eg9v47AymA+6Z6ZeTWazbxSgfV8hCsAepHyZ+U2Z0B1Zgeuc5rKp";
+ sout << "MhBQZ8xf5x+yM1G9Jtu4Z5kgQ8NdliztMLkquT+mbY9U4eri/WsAQa0BvC6hisueLg7IGqjQGgIJ";
+ sout << "29Y6iqD9fJZuGAgbEbW7qu7J3DyYzGaKyAx4+GOyctio9ChW1v4uq7gkc8XNpmYiGl/90mo20hzR";
+ sout << "vEXjcg3Hh/aHJlVXH0hJ6W4kOuBFkZxptoDFi4vKdSCVWAgnQgWhmhkeI492Y9bUaDTxejYhTfDX";
+ sout << "N08UiRXnAN96KK9iL1fJZxB5MUX7cJ/GvW7RjTVP1MNzy/FdlAN+k6Sb8vyst3Sz3eGHOgO1hpTq";
+ sout << "Wz9Moc4BVabx5NLlb74ScQHCHX3dnKXle6sPCzfbPjM2Z8rbm8NvB/VH1FZ/EzriephAUYdra8OV";
+ sout << "+f9V3o9zEYMGujKxqZysPfheqaA3gKKDVgKGhfUn51dEq5YC61aWqxjO1suUrW6rQpBHN/V/VadA";
+ sout << "hpuSHr2UHCeL8yUzC2nnXxpHCifGCrx1/R+A7EXBjTlrxd0n83pEV5paIK1AB8Vsv7RNrT0bEPxx";
+ sout << "uNxgTVdFUVHVLZf2zuhlqQCSr6zKzQgdlsW2Rbkzqm/RpdwRmONCWkS5IRTwdqLM3rfStz3zt3U2";
+ sout << "PklWaPeSMTUzREfa9xfaxA99Yn8QcQZU0GK+zEe0d/iyeDr+7XPYH6ZOgQbFp3BgHWk9tAgeVx3d";
+ sout << "hOwbut1u5WRa4azjGkYUUlvXLJpehNwKEiaej7u9jKrdQMTRi7gtSYliGdNr0nnw4ADV60LXD0Fi";
+ sout << "uYvPQwJnbICCYdy/Yx0X/HBu0dUS9D15JuHuT9z1D/dqirMKNiWhXlEzC99gX4/mGGM4Q6l53SQw";
+ sout << "0205XkJ21NasDxznXp8UE2e4GsI+N40b6mLvLZaauw9dB2IeNhwZvn7mkGDraXjDnpnnmX0iV0Y3";
+ sout << "OH6OCq5ZBlk9/IIVJpS6Xtec6MiuVBYrhKj2MVn0tI7rRalXXqk+7Plmg/9S/Vh6U5RM3pCjTTf4";
+ sout << "ZEAxFH7LhT/JRpkk7fPxcncaHQVrnrbM8xEYc9/2nptKYriJdXHXecsiH8xYjjzw8ACs/FeFyPsK";
+ sout << "84MjAvnsSWnPVpJe9n51BIQ68dUG87+igoKtNjWomVVSAMlBKtbiNrzFN+4AvESKCufoTC4gpGPD";
+ sout << "2sFkTO5yg8+v3rwFK4aAjBLA6eKzSGkcJqcThom+NfRjrvCtPysEisAdXHc40muhThMiIiOFXwbH";
+ sout << "T4722yaSHEMGNwDqY9dlixhGz8L5G/B3hbrlqCPe2+GHcmmMI58oFMgZhEh4u/1t5nkCDizZ1COg";
+ sout << "+CDWA4n4lcOXfVMwp/8u7GXZMMLuGSJUy38DgjLzy6f8bji0+EEZ8rbFsvMjssgcx25YJ9Jy23eq";
+ sout << "rcwGu8iMKUQ6t8J2sZFPXuFrn9cyFP/vP6qFNWZ1CtiN2fRVbqwtBj6DXmSae7NDMmL8XfGlTD5i";
+ sout << "aiIWK5SPqYzS0K6oPFagYWYjIKqkNRyIAseq0fqTLKnyKmv8oj///TlXGawbehr9clexoKbAM5Mv";
+ sout << "1RKu3/pJiPVwOPXcwCIlQ+BemtjmsnBzJySAkA1I9mpgkPm4Pg4qblWhgq/aVulwN5uSkJP4ZU6n";
+ sout << "reWj3khTO6Svsua3toMPYm6FOoioKOOrMMXQCoIjsybBmSaEHeAFxZmHODtbf/WrGL4YbhUMal5o";
+ sout << "90Ay+c0apxZIV+8d7Bm5L0dRXmQfSUCnP1CKyUEGPaMAPK+DYRZNU97lzpowFRdjTlrJX5aN3jrk";
+ sout << "WN8/xaU0PTVo1LtIesYvyG8n8gBSmj90hw5QLFt/94H1NDvPYMvw5A6oGoQUYdCXTXzrRJ2OEow1";
+ sout << "0/qEzLCWExSS9q8nbsf9Ne5ZC24XA9KHmiJot5v7MZ4KBORjPy/Ub223U3xuJqVGHlSjYuHqmOFC";
+ sout << "bhqGY4yNo/7PU1/cvg6LbtQSriMj8+85GQSheMBI73EnwIHgPOIV7EGEr/GHrfyM1jI763x7Pr0H";
+ sout << "Bcsg5o34glMy6YWSMLd16/Djq277MW4XiMoGPK7Rptwh9ahtgjS+SAl92VTVyx+kr7gZXfecCivl";
+ sout << "gXUmA8sQugTC3Zf7QqR5eJvYA/i+PMRWkLw4YCogDOYXK7tKY86djtBHftkYob6bMj8a7/1/XGzm";
+ sout << "Xz1OGinw9yYz7N19wgkVZEubmK08ZnKINN7A9IU98iAauGaLizHRlQ1y84Sz13coFKe9lRtgnNHr";
+ sout << "zsG5yqiTSIlH1jLyxVh1O94Qj6RlNT65LPJa714IW1ot0XsS8QgX7Jazdb8mqyiBky3/w+VoLWGx";
+ sout << "kplBp4VZN5gRvTIHRBq4LaoGgzc1C5us3loLsxNUDgKBCBjUkywWAD+l7sCB+2btJo2HAcmgbNBQ";
+ sout << "zITjO+dRirzIS6GerX9+zbzv4lC2d5K8ZmDUMHV2x2NKaXIU3mP6II/iBaP7W2VRx5qfAAChmMAP";
+ sout << "B6zDwzc7LNopXp/GgOKhYzjctxbbpOcMjP1gnQ/2F4EAyXOU+iMZeXqlmbJcXk9S+bCK/Q8i96kW";
+ sout << "k8lbmB/ecQxbYJKynBJoijaSzeRYL5wC/IZyqScchpePu8J84p7DEu4ECqd9+vhjp28/u6aCgnKe";
+ sout << "T0mE9aIOVS9DLgIURa5qmWUFvolbrZ698AwXlEV8vkMgSDZyxgRSBdyZcmQawO26rEVQWeAr7dw2";
+ sout << "zQIGGtM4wACd+Vv3e1FgjvkytZOwyo0NLlURyqdykYhZx+youz/Kmmri50XG/hpD1+5gXtVFWUhO";
+ sout << "V5G9Aw6GG3+trxtMJnjCdkaA7DTjTIU8nUjNnx3TsCqXK0OqDMvem3e2TTeXorRPe2zYbhYB5pVY";
+ sout << "lSJ5yYpUrIzJCb3xPGNxQP1qVv/tCEq7IIKGqPl2RS0NihXE1uz8xPMu3cxu1juHsw4Mx3qi2C3z";
+ sout << "KH4LvPoEbP7fO9uELQD6ipi+AmmB7bKabD2AwsHFurh8oFtaCt+KuVy+676ggvwGEFm0j28Cj/ff";
+ sout << "SQzCut2j0HQjychGdngbYIF3HLJQNr8rbh7k1FPpJhdpJc5lH4QXMddVxuydPbwQuB5zGOWZdc6y";
+ sout << "0O4zjCgP0ibSPyEdhkolB5T7Sm6ftL3dctp49kJKtIyGd5scyxMreHGJxeaGADdgqken2Ahx9izm";
+ sout << "VoVdjhj2tvLlJ5FbAlHqir5ZcZIu+prlMkAnW20mXjeWdh5lSCUFGcOTTyxirXkUfyDRFVwfMnOi";
+ sout << "Y8j4v6X/LyNIKgBPY5qFKfbwAjYWyAPYYQVBurLEu5gsYGSYhmtm6BGZ39gnxbBfhB/JkCyE68Pp";
+ sout << "wwAA3SA3Uxha5yMTYK75ClGNsWyI1AwnkfYuYgEyCbBv/psf+I/jSpT3yvcEDKiI7FwToriSgA61";
+ sout << "3ksBBfw9E7Y66wmwTHq9O4alqsltIAVLkj/Lp+DxSc6tZn5lI66aOj7paImjnZac/GoJAxVUfcKL";
+ sout << "q8pBZEwlJoAZb6T3wCuEdgZlYp3MZ5uInqAZSOsqQQ1/S1vCj5pwdzHrHF6Q0FJemS1AO2/GCk8p";
+ sout << "dEaniaohFtWT+AMdEQi5TLVmtpltYP8J1Sm+U0/TazrQStLUXm7Wpc2DkMdU3cZw+ncK5znAzzn4";
+ sout << "dQGBHVR7TMfIpx8anoWGhShI6oHh4zPXYtRtiV0Y514GWxAMB7oeLOjnJTWeiwO8VS9IPCjcKEbO";
+ sout << "zTpPAwMiBrZEnglnKmIGxQXRmGMpag1JZfPRj/XyucX+LreWYF+PabGLPrjSnG5e2D4aDlNx9wGH";
+ sout << "raYpvkG/LJJp81U+s3VsS2vUA6uWgz2XiHonASuttcV+a5vo5tcKUDXDaFckDzjpIJe2rn+Z1Xxo";
+ sout << "X8OomKyPCCGmMEmE3SxPkGCkTIDrbb5Pc9j0NDVXbRyjVZUf2hrZ7pR+H8+jzUVgt4IfnB8Q7/Yq";
+ sout << "uKW+YvA6w3hYv94BqXPHeO6FMLhH14iBLVLp1Yp2aOp1ehJUQ21OgrhNGjMmF04P/2EF8DXy2V9s";
+ sout << "h+ohPrdH13fyVrQWRUkG+7Fn+GAYcpD1K5jvRavlli9pHys/axwdu089ivNpA5D3D+t+SHVGEw/L";
+ sout << "ZcWpKRQrUZnOLTNjJEpEIAkryuxqTcGDzZlB6ngOVxK7oUiwzocC+zjKtmafaH1BqxhX3RxkXy/8";
+ sout << "sXA9pDQ2HIn82Y7vv1+9L+88KXcTLTIUSI6iwrL43h6IhM018jTNfCgdVx2Uav7Vax1kmu9O8mhT";
+ sout << "NOxDk0uYoMJfLPhr/nSWlKjgojoEj4IBmALtiMtqe8L4zNpHTRxgvLW8wMQAyj/woN1mLxLb7Lxr";
+ sout << "UIY+ECqQomA4TLPWXuTsLMJXQyFgd4nQgqVTL3PUdAtwes3xrHTODTYv9f5CLEiTBV3BP5V6lo5O";
+ sout << "Wj50oMM6yJWV4jZypwEp8osWHksLQjPEubu5CiBucOv3abc3Zvteygzp8J0la8wjOT3cd4+LHFwg";
+ sout << "hQjO7JuNXwM1xhgdostGxjYExHC6KB9HDG3OUQ0wtxsrh5QITF3hmlvkE//xrWsyDzTcbWCgG5Wo";
+ sout << "NRy0RpsXsMImYHlzO3hV5wQHz1Xjswc0ATMArZ+YJvXieYgEyhkIIzYlFlc1/GgnScyJs5Al2mvs";
+ sout << "5GFjwMfC7wnMNRmV97SuDzQjKOiwMFochKCR8pSbMzJ4+dW0W9LMvasWGVAnwvQdfIm+E4+O0W2N";
+ sout << "ArEB7utReECczQcBVWjdKqK6/N6QbXobZfvLto8AqTTw5CjOgTpoEybY4og+zNemINlmYgzuU7Jk";
+ sout << "IwiyzZLohI6UaTrPWd9Ck2lLn9kJf07TnWVw5+2gIqhr5M0d4Yz1Xlgy7EfAWMhPEu6MuD0WKKB9";
+ sout << "1MSe1zl2pbq14zDGyICSCXCLTTnLjc5Yj+E92s76igXq71sobtFNGzjQfZtK9VLTrAaeKf4Ql4jO";
+ sout << "KWQJTFYbvd1ScHzbPyF3S/T0CHPDZssQQTd/VBOuspVr48Vl2/MRdLpdzjd2b9gwCh2EwGeJIy3o";
+ sout << "R+AdmgHfA6PwhJKBkIliNMMR1DEhwN6QDY+b4GKnRHe3+k6tChrTNUqZpZ7Eyhret/Hz//LhmPn5";
+ sout << "EJ7PSeJR7K8PKE5WoCzr0h/o7SeiHi9qi+2STfd6J940/2D4HBkYvJOEH4Tu+1XBPi589dh6vnhP";
+ sout << "aQkicmf2fG4KuHO9f9Jlt21I4ksrEaibH+BKFFw2Rq2cHME8Bg2cG6HO+TyRwIROhy9yxtWQEYqY";
+ sout << "g+zU+DM/WCXBCNDCPqy2q8nY19izJrU3R/9ZCUF6Ji4GjEzjjb+mErVhPcWpaLk1FWAPxtC/A+By";
+ sout << "rEiAVG7+asI05YWpap2g7apUyGoyIdnHQ2g8QucKspJQhW5BiF4pi7Kh/zzPQPXkuxQ3biy1pOQ6";
+ sout << "/6EEXB6W8dLKHTHn3cXCphbbQzqnti+yQdGuvwDiC/GWZbB7ePUsUSJ3+b7gcR54EjMeIX6rBUxF";
+ sout << "2xXAs8El87oP8PCp023NStXDjpiRFCyQ9mYweS0bRh7MewCbvgDIGBuJfXL46eVRfj1sdwlnpS1w";
+ sout << "aN7ePzPyTR66zIu6kwZzfv5zw10c2LC9cj/WcujPSKir/nCMFaAUroinmmNiH8C4M/CL4mU7SBkZ";
+ sout << "/hidsIh9YDq7cQRP+vBwLgoVewGxnS2Q8yfZ0K9lAEsEo1Lpxg/Bhr6Ei21nnfaWcMQEVj3Gwpdb";
+ sout << "fqKrvOzQrUbQBADmmksC7FSgEBUTYze282+qXvS5jnhFg23iV6bafe7LOnZmqLivJGA6S0Y2D0nN";
+ sout << "HNP+0t/IutdM96z446RKbwvUrfNiPZF1CJWu+T7Gifwib9XOmMCvamYUqI+dplpbgCxmwbSnMwJM";
+ sout << "yK2++dILo2W4OfcPyM9eSBgY/fWKdcv330fbSE74lTUiInCJr0hECkFD6gThcDVRY1MJY1hX2Dak";
+ sout << "E+r/OKBGTM5i0AlXeZoi5+RyIiA6wRTKEzjznz3eHhAe8gRKj57DUbhXd0L7BK1ZPs/F4y3/GHtZ";
+ sout << "rfh8fPWKi3TX77vcIOANXRmGtpDm3CDxlhcWq/bWxEL2u6wk4Cqqn7xxYLNmvnZHB3GF8aFfPRdW";
+ sout << "iqdGfvdkoEWNNAd+OJd5dmu+kdqFXnoGtlItp5myxabhN/I5sdFxalICuAm+kAl0ocG98Q7v9YVI";
+ sout << "PaajznXTdN79Fn3cFSHlgrJ7yDb5T5mzpk6ud45ux/SxLw4ERY0jykYeAYV7NQp87gSxDfp/xyJu";
+ sout << "LKgD5aTz1RUrkC6VnCDdij8ZnqWx6FlSUdXt7+mDe7GAKFBHsKBUY0B7/nqnP+wS9GU7SqPuHa54";
+ sout << "fxgnq2fQi/VaMA4ZTugMOvCKP6PgHWuzhFOorZKnkN/Au+/iOH4z+1NvTzBiBjCH1ZPzXEU8pXMC";
+ sout << "d+XzQyzL1fd1xS5+FTOq/FV9fm5ihRm3f4qVxdJFmSnNKBdUt6+BnrDoReWLDOpNiOF9oOfADJMM";
+ sout << "c55TRsb7ikJpOUhXHQsldLRMCBQOB1YRiDjLgRXnPM2Kh1/YUFV7dOS/tLUj6Th4Tmtq833I1AWD";
+ sout << "CLADK2WJXlXL5s9HzGBwR4eVwzuvQsTfIFZmbnsjb+pIDR1Ek7JH3eyAEwy3WWATtUfSC0HXjEgo";
+ sout << "lKpcSGD6BZsJ7IkSo7Yf/pVcQbpqOV0W0QyGcnu9IUu8q1O8iF7Nu5LNEFWBm+hpN/sUgzE+XIBn";
+ sout << "3sInc0pG0ehnOT3HznAl24ljHBudSqVqoT/O/K2aswIhxOl6RHw20PypAGvTEDQThB3prX1RuTeg";
+ sout << "kmQnQir2ffAp7pxEtxvQJ1kjA7Xp88ZdE+xOFX/8yvCnCWh7jzidGLOZrh6T/OlPIhd0Ipj5GiM/";
+ sout << "CZqxIuPwEyKbZJ612Ny57dLmPK2J/J5AuWaX/wmMlItloLwNSvTwQ2tSiLUraT98oIVqhu/GSRs5";
+ sout << "pRbWOeTspkUZVC1306DklVBAKqWSDalxyilnxToN3l1+DG+UgJtSevkNqlDpNfH9LqrcRrfmokTW";
+ sout << "cymYpib926UvmWf9lsMzJfBqFg3QU5Sa/eF+BwdMgB8Cno/p7FsZ3hKytbOM5fnPzm2MYR7Kw5Mp";
+ sout << "xPcptC3Al6ammAMjlp51IMU/1wyfDEZslTCZrBCGSeYBke5D9Qz31gl1Oyp+RV2lxhNngKZcBKez";
+ sout << "HjNzzJHm/PvqfxyAILoth+5Cb5LxcBp49XjcuPhZ3sFIfNUs8IhjnT4RUZxJHs0PMFHwx9NwgryB";
+ sout << "4UqLFn0+Xves6a9xW6mIzgSJZXxgKcWmA4MZFR1welVzD7l0Y6Rl+Wjw+qrklobDSNgntToguXwR";
+ sout << "mrSM3jnAYy00HnoxJVcDFZ47Fa1aYpZvAlZHbgqJ42LrR1KvI4Qfe+cJVyV2iq3FNB+4aO6DUVkn";
+ sout << "2dBbf2jmR8fNIjz4XkSCXPpwx9OZuYB2C6JjUkQ9lGfDGPfFcDNA0mjkX/FmKf67UaGkJsG+DV2m";
+ sout << "sxojbh+zm48COhWz0xrBGSdj0viVa10iqcKs4+izaPgDSHMFdez7nT1aIBqf/ys792hN9h8kybET";
+ sout << "GZw6HGYgD0u5+hbXeXVC11f+8aUi2vjGW9gc+R0cAnAjcLNBH6IPIhAzC3E3IJ3R37wqQ5huxyrm";
+ sout << "qZ5AWYXo3PRswYjxwlXvUSwEbc+GxcFR+jirERKaqgwoVOw/2F2WHFQ5yQ5R0nI6uUhNMbTjlF9x";
+ sout << "2MYa3H19/dgtY4UC496uQ0uVJ06wlVRCL1SCRpSn8y9IIvAEpkhX1b2o8NAMxR1duq4U0fUhCDo7";
+ sout << "GtzXFiUKyLgKT6tEZdww8+FztcaEB5GmoIxy25ReE9Y2Yz1w/IjIvyOQvqDxqNMC64ETTqYYlF7o";
+ sout << "oOGEBqO3JnTVdOtwbv18JsefEaqWs6hyUboe3zxvZqnrmSrF2Ezydw6jFhjHyTdI2rRNK7mTCg2i";
+ sout << "/1fxkFC4Rlw3U4NctvjVXqhVG7/NafRynbtBxSVq17MH1Sz3IRDsKS1/HtORRuAYX5/KYcgDjNyL";
+ sout << "ew/mVxTedPDQ+NDsj/5k4gjpHB28TQrYD4R8DheQ9liqV7R+shaUz98cdqoamEcpi+Vvo9eftqpo";
+ sout << "/tfL8h88W/BFEBPqG5u0CrSasq88l2EWUflOfdHaVjPiUpA03DhJ6d7WEXIgKqR4KQ2QUFqhft1V";
+ sout << "x6XQ5c6HvkYEA3JXB9WY7pqwhh807ucN0Kp30K87OElOIDXe7FWotG1Nu0IjnhIK93NW9W4g9CRw";
+ sout << "jDvID7dpdENEY4wzLG3Q+PX6srxE/bjKkWXRrIVe4egRq1yc1BdTPd8SuxHKgVvQYGNlDHZww/Cm";
+ sout << "sdtNn9gO2AN+zouZkXsG5JdkJVoGZq7SoLF1JNiBi0asFYTD3y6q6wvfonilbZSnc6V70VVmjY53";
+ sout << "NUCkcTs9H8acP44DwzIAMSm+IWWynZnEmul7B9+ViqgqsHW63Q7AdBCd1O9oqYaLKa16iFiHXAGS";
+ sout << "AAwYVBuirfpq/yGh2obX9w5LuIfc1Ohmm22xT1tL5Tbx1y6rOFL/LirpgtqOKACxNbxiCzEvUEpB";
+ sout << "XHMNDcX/fRaVFYss25guCVEj9/YsVcDz+lf0+qb88TM8LGqYCb0A2lVHbL1O3jdvBtfw3MXHX+3f";
+ sout << "UqH4kjiyAkgn/TLXzlbEjkGu9EdImol1ikOMjffpoXri7J3Rakpq19cJcXZEfN8Nl/J0IxjghnQc";
+ sout << "/Ge2ls1WqEwMUrKyQfRwoJL8Pe/8JErvyUuIIIjG1PHDPAUDmEHVXWU7wDms6GPAxh5hWM9Gwghx";
+ sout << "xDxn6q999jMjBmut7Xvl2yozhYpzs0KCNYzpFwSOyIjfwU/S1SvpXbV/fOlzGLqM8uDMveoDeLTG";
+ sout << "5LmSt1eI2ZMnKoDquGtw9Up/Wrj191RIHkurW0RubmX95xu9KJaLOSQ07cDP/FDiME9LyrHlT1e7";
+ sout << "4DkaBHyVw9fNhbeEWjF9dCOT/IQz3dQhiNizidYZgJX3d/coX3xjbBKhI18DAtQPLdx3EP6IFcPe";
+ sout << "AiU6yzb6lcthTbZ+DAntjRMbEh4sCd/OtjC1HHKm0foDGYcpsN6R3pxA2fJ13yU0ZPLYSJT+3hCE";
+ sout << "vyjvV1go70WzRw5TJ02F1ROusAkZnL+LVJj5cOIKQ+MNifUgd+jtwdiUhpFs4HWEnDxq2tWLNf5d";
+ sout << "xQ3igiJjm1MTfsrex6ehzwmyueSZKxTBgVRQdLTcSSSiivdx0zO+/TqN/0s5WkgCufzMm6uMf/1o";
+ sout << "eWPkERpbGs8c740XtOwHi/3JxDC135mV1Gfau8qPUA4VRqfTKMKwcCk2j7SYzx0RaQvaNDa94aNd";
+ sout << "a8gVcQhLQcStjdUOR8smPQtqtEoIsHqLLXDZF/3gWIEjQfYjbchQcscXEi5iM4jMQCP1/Gf/oR2s";
+ sout << "BY/eEjhzUNnDxyikEt+idxkKLnuRYJoRVTzfCk3Gdlc556Vz14By+uKeAmmgVVH2+U9WtTMOO7J6";
+ sout << "vlpJ3lMEQ9ANonhWe788Jz9pzjzIHhx2UfmZM20I/t4U90+iTJ29/AoDc7g73bHfzugK+2WpYocd";
+ sout << "ecImcUaQLdMqL8gyK882q967InpmujoXoDo4hrqpaLsowHhWiAbcPTIH6hrvkNYJ++PYo3QGCYwf";
+ sout << "z7z1vc6pUEuKEx02TvYO3PX6e/i7RVBQ5HBjVsuNiAw9ai0fsc9sKjwOml7//NR/o6hdFvNnn7wE";
+ sout << "Qk7pP/kasS/nVGYvsxAcqQBBXRDfDEB1mpkZX2bcNzZ/PrP5t9aS9t7LQYfqA7lWVgY+brfPTdmE";
+ sout << "haXezSHgRgy8iQjBAVdBl+Y972kmR4zZuW/SiK3mkOe92GD0dVXz6l4/HL5VFflONDAHCpM8s0zR";
+ sout << "HIlzwxAHl2wwiKqW2LtUIfnHA8Tb36Uix1nyKcGiYjHtv3+mc13GzYU4KZ4sj4suUs6Yr/XEtAXm";
+ sout << "WYO9T8LJ3zzbxMNJZgsVHWx1UWKixJwGCv1HrEAzQNmm5aDBGYrutpNtcC8B2DKIBex+IGlpTwrJ";
+ sout << "Avo0T5uJCfw3hCNwVpoSvpaOpP/hFVJYI5RLtDQbgshmFI4iFz4pIN55qCA98tYAg7a1m2g4CH0l";
+ sout << "1R1ErZK5A08UpTFnxKWS4pVB1XAy7hqtEd8YTADZvNpFTkocwBFsYSsDU92hTS1GzEQ3+NPIj+pg";
+ sout << "i9G8bDLaAY/bojHUktrnaYL47+BmrKQ7N9YWtFsjFrOaS39NIw6WUf4nPd5uxcoqlaM59SrOyEBT";
+ sout << "r4emeEzPDzF7GJd69qFOek/DrjX87M0rDcyCOKABNJfIBaeEJKKnyniWOx+ZN4fb9meuM5bKTqr5";
+ sout << "p6JLFbf7WFzqgs3h7tOcMhSvpVv4ve5Ap4yx4w8ftImJrW8f2j/OUiX6jtA9shoYI+4DOqYL1mRS";
+ sout << "xYK5WKMnkftUB6lK1ldv0ogi8Z2Niil3ZBpWqhY4+tuEvTLAT8nQtX+p9/gtHxNCyGUV9nQk/T/N";
+ sout << "lN/kBTdZs+KzJ0K+AIi7K1t4dVKggZ4vmqYVPj2WTKHboSzNg+oxAL8VmZAxnY2bS5AFR4wRSLgF";
+ sout << "8yQj9xfyDKTPRK4D7b+Axh0s/ra4XzE0S+M2wk0cmRbb0I8/pG7yXSFzYPM0bVhRuiTVT+mrxcpA";
+ sout << "qSJRKzFQ6h+KQLSYUVAspQeGnIQX70fF1/BcYviKGzwiN+T6zLH+thLJnZX/lgHwZZOtHYnCaBBc";
+ sout << "t1WFi3G/Ex2+UHo+nPBCFPq9TNEWcAArNxPwPb9e2H+ZH7dNgLZjZtGQoEiEI2uwI5l7/X1++/3N";
+ sout << "EWAKPziqUxX1sZeW/T9cLMiDsVajYh6wqn11+RM+lCUKvYZym15JcEKAxxsFxo3S3gbILFXSy6IL";
+ sout << "AkGGUsKtITt1LB3PO3sIare5OP3YAk+PzEVZcwp80V9dHq72cSskofr8hvUkzsBZ/iH9dC1dupTF";
+ sout << "UbI4YOrA0nQRnvx9AQVrPldyzgxv9y7mVakcgLLpd1YOUDS7We9Wh7exElCVuB73RtXfcd7hEjRb";
+ sout << "fu0xg2KT3wOTQy7WDAnidLZLtWeKRqDLxmO/W7370N1TgXS2sKLG6lGk6UY5mEOmoLsBtu6G2bRj";
+ sout << "QIZnC0q9x7sUVMyg0TOZ3698MgxWXpvNqocP7+TYdJeptTkg0RZdgON1WLlCs/yBSm9DYpN6u1ak";
+ sout << "pr7GDx/Yira5gaDGvnLtoL/6L4/0bUH3UJmeNACcLec9V+qxUJnYjQPYPjGjALRfRNPV8YRsjWBn";
+ sout << "eEP8OJG4i1dQAif8ehmV1Q++qHcmf5je2IQwcsrgj3+UIw6urMoRWVwDISw5Xy5Y1HwitWZ6VW2Y";
+ sout << "eeeLRYBIWAsoJAw7rSyLqKjaihO8rlGw3IeVrAHt3k12wX2t2f7eLF/MKv/8JjK7d/rr7EbNKrGA";
+ sout << "iyp1zQFtenTYrjr1/Wh12/lmTnipKS8ZDCJuMM0i8NTXB1fuisqNI35HCuXXoJQmc0Yfu0jA99jL";
+ sout << "T87FUwkmKQA5+Q0HV2C70CfE01ALVU9ZbdP80mvHlMhiLURPjRmYjXl++FKpHU4wvRmflXG2K7KD";
+ sout << "aKccgkYqc+Pw5T2AGX5LiJscIykcNaMLVtH/QYPasG8jfgVH3dENDXagJjuYDwkROZ7Dm36vvMZG";
+ sout << "QkRDHXApzY0T9GizL6V9WWflM0lopSWELw4HNPExg+ZMSE1eAebW58HHTDZEnu/Yrr/itfWUid8t";
+ sout << "od265bhRmkCVeEwIL6Kt7DBdDVDu268mwE/OlCRmE0Z1XHDIR8ggVbDjoQHV8Fzr9YCM/36qexHL";
+ sout << "v5VxMx6PzZGBMIGuPHIiqnqaDPHhVpMGbD6Xowd50iE82GANJrgulQOlIg66T5znMtXg6g6hOqh4";
+ sout << "WL4qrPm7T/2mH+T5PVIR/D+VJSoEtnRLdnO6zZnIZp7tj3jXjhNSo+YGzZmdp/H1iuSqC6GuWKue";
+ sout << "agUBFs+p5Zz/yMrYI31yx1URhmsLJrIQU8llICpkRo3uqhPzpXcZs/MZIS5pWniqIfGnqUtzeusS";
+ sout << "bouAVAOgf2rModIu5NWIwdnf+FEsDsw4sYDEI0jkJD8StwBgTg3uiq1jV3FP8nHWgu/QoWWXGtpx";
+ sout << "u7OR7HYi+Aq4lK6sVu2qld+deujeO27r4atK+wQfmbuPyjJugTMJEdLfT2SkkW9mN2Qx4jPmABq+";
+ sout << "5tCUgibAdWz5SqJBDr/jxW8WZ5YCfAhLifSbTdcM/IUF5Nq+NCT9vrcWaUak2K2uhfvoJjN7DWVW";
+ sout << "6KZMzeN5XIRKMZc2ItCW7sUmAy120f3LWzGZtlX49LIY5nsawUDOrPIENNab3CWEPAuKAciOsFU7";
+ sout << "CV6BvOxRekHvH/6clgwrfSCGgJsru77XV43H1/wZciyR/4QDMXPaewDGxdH1FNddnkZVAuYWrRzq";
+ sout << "PF763f2SQmwM5GERk2bxwr4eeeBWBRqMCK4ZNWpdYBseupT694Rrk/bnUKjuo3kwYrs2LzRXDJVz";
+ sout << "Fvj0UjorGcr63LSXNyLhyBGo/Uhn7dgtVUaYzsY5rChqvLthaBhab+268cfGyYhacN/E3vezmySL";
+ sout << "J1Bj4twQ+kcbWWQE/Jx5zGkgorM3Yu1vbjilzv/UbmNb+Cul6MgnKDRFS+cGQ5UMw2DD9OyTKJZ4";
+ sout << "5k+BcnLih6kE/s4UmFkeC4QRqLMBBhTlScS3wXS2lFsvMd/BXNXWtyFtk+dUP0AH5tgrRsaY2/h8";
+ sout << "xTgN0sMK70yvSzojbnExkON5mDszehIEHnfBlL14eqImQRiUKlGQlP55T2AKSYBbpKFJtzy+/aXp";
+ sout << "SxmkYMQcvFD+AH0oaqJFaB9JBLT7JW5D4ZwTHpMX7AS6W0LxgriTgPZnSqVgosl2DZLKBUt1zXjw";
+ sout << "dbSmrROYcUAXJxMOAj6jNlv7nMkn2xV28QS7lFE+aq5yBIX+UBTF1uqk1XI6OddMKrtzh0SH9Dey";
+ sout << "q2NWoOiH5YIxHHbWx2c4fiFrSCo9v2JQlk/rv0GKz5jX4GEI9+UPWzr4MA6GzJrYS9cydN/NXwY8";
+ sout << "lvHx0e+zI0aiQkbZb3hNnWsjQ5q2gk2pj4yT4zCFhdL9NBTAwWrWqjSi/Zf8BU5R23WHHMMPBfEj";
+ sout << "1gX3IdAZXw6tNUseFnBithp1Zo52/Rv+OUyNIC5o9whYx7T+e1Yyr2ytlKeiAHOWmiP8h+Mqnb0l";
+ sout << "BXYuP1fiYcGClI5Krh/05hb4CCW0n385qHMIGDiFTw1emVktV9OTiM0Z5M8PUHDi7IGUyrJcwXyr";
+ sout << "Bjh5HwdrsCA7NdLqE+8QPYnAPj+rMJWw+xrrlcPSpRBTTPTPiVnn/PXL6khN9lBiwz6Kn+X61bhO";
+ sout << "8yN+OP1IWu/DZUzauOoVqhEPEARq316Uj15VXoPpy2oa9/L0Pqzna3d+8/VxnSlWWo7i7bWEXbc5";
+ sout << "N9OKxooZQ6XAFihvJMIsVvkYp0oAQ4AIoOLUSm0w0ejFIWsQ/goNmLj+CQneb3+YUpdYtpLODhln";
+ sout << "/4HgBsPjGnEug/fzuWbEqWXb/e+VL8RUX/r9nXe2rGeQtdQkdqsNj8avFShnnth+UNk/2TRAq9Cd";
+ sout << "eDYJMWOQgP9NSecba1B8FGmBdvC6gRsp06uKfGesrktOPlzgKmZPoqsjHhrLLV58KwTavZSoIriF";
+ sout << "Mzj6ZAalNMDEqUGuhtXZrhTVoRiYsy+3jjJ8IubSS/RxiQfPS3+3nudS8jzA5u4uHJcy42t04z/l";
+ sout << "NkN5/kjem3F1fAu79/16XeDD/wk9A7FyjqsnyvNXsiX1yKcEC1Bw/yxCh33mgcAlh5ilP+gWc7sg";
+ sout << "QQXi/dnqQebktoqyUYklPN/cYQpCQhhY+YRp1wwbLJYWYrBXiN3TbACI1koRROZw8pT8YKG4utqc";
+ sout << "P+IjOrAVYg94bzGeuRVI/T+mpmGHs+lsgUY/WLx52PqnmLgBORLHZzM1sFp50ALnbYah3gAR19rz";
+ sout << "twGYVQXIIhK8rewz/RwJJYwSvilMiJNx5UqJc8VFaEnwKcr2QTqlJ9FezDpuE5kvqlYl4tywsnwU";
+ sout << "ta1MB0FXAC86i+4pXWfaRt5Y1g48PGEI3TdjkpiRJwEQ6YGSoDngFX5K5l+rZePE6FHNLBoRcIp1";
+ sout << "5aNK7cS3MPyb1MN8zhjX400wFp208a28cBpCH0KmHvt8+ip26ejS1C1/aLvdz9j9sQtixw7JU4Jb";
+ sout << "ytPOVlxKjAzFFFdaQnVEAkFCKetYCkMzrARSzcLQJcBhhChb8NQDZoQWxe2286/y9p1ImU+jHlzC";
+ sout << "iTREz2ADRBGQO3JGz2h5GYjU6YjZn6ojyUyMECTZASV8RbvNy0/8yYPNONYWm8v4/LLOUM0fq8iv";
+ sout << "xwag9U6L0Y+TWtW1YD13ThbTjTrg1lTHKzyp6KeGWNWGH141FgXz9+r04Qkvulcn+rGimg4SuvNJ";
+ sout << "XLotrbWuhULd/esyJIAqfTtHzHZMSBoQ0WauyWV+so3c2rGDjo197k87SJkfGp+l78kqj03jh6ev";
+ sout << "WyjhQZizdnuNdBpm26GnuDYmZdEaOAADCtkR9dvMWdeNsDCjyRcCbFEPiUX2xduSbw6aulm6mW4l";
+ sout << "CckSOv3l0/pfw8bTKEszFqeXiPEI2SwIbSx9R1nWd62VVDrYmAkAXzdJ33QOlJm6YJT97+QuU+61";
+ sout << "RVXpRanpmC4Eh3jEZKV2BWIuGPXzrDGgAHoHNhRG6vJ0Kp3RqaxFv3cPrLZdxDnQhZQH01O/dZ8S";
+ sout << "swcpb3/D5GdWvUS6MsIPhO+nhoabkyxwvbBNzyaLVDoXZK6taVT2GWFyYqguu3ubVBGd0H0Mksh9";
+ sout << "luAh4ogG6drswOFxJhR4S8VIMkF3x8PDUj5w8WTC2nlhxIYVh1fxvdj8fEfIH7WzBElS8vpZ1tXC";
+ sout << "1JSgHngiEYafDFX4OJeHxKFu994g18YvLkJDdQKzTDvOiD9TaKnnaSGGXZuasKTtD9z87GX4O84P";
+ sout << "rye5DvbYUP3bZx8GOmhf5YsaLI6b5m+iX0Mm8ulB0hOla8uE6EfLBovorLCKke5iPFU/gynxeCkR";
+ sout << "8RL9zw58ATEm3YuzE6nDNaIekqV7Q0MqCpnpAdBPxjtEGo9yRRPeu8SSSGL7IGKUUcr/6Xp8ooyK";
+ sout << "iK4f+mB0hgO0pQqffQHkEXxGg7Zd/eTGAO/n3Acq6bjGr03T6LEO0KFJl1/m96ZEBQK/iiV4m5+V";
+ sout << "bZvr6xOrErUE+ih4g6vk30cSQqFvyH574K93bx/uyydqzyPEQTZ0oJmT/KoUrYti7CxlGwrqPDTZ";
+ sout << "AuEOwQGqqxoW097Ql1bpxekOevJPSL21snAT+Lf7JZ/79YwZu2WZnWoDQRNNm4nwn5M6IPmErESJ";
+ sout << "szsF5VmUs71cj2/gnK7NDU4c1kLjXSsVVnN3/VL1mOnYV25Nh/ktPRfTLn1TDNG+rkdJDjGcjjSd";
+ sout << "JY9Ro1Rab18UKm9nE2tWjgbNUQbzsisy9P4F7cBLBd5K6y47wRYZ7MamnAUZMUEDmIiO1SR1uLey";
+ sout << "gugfd3Sb93PYjImZCjulVD/w64IThgqDIgVd1BOdAN23laLSQpfuR0xW5k6GqGtiP98VwULbV9rJ";
+ sout << "thalp5t6mwsOdo46101N512/p70XcZuX/VyFxw3bGO597RbO2nj+gM4UanMOfTEQch/kaJsk/WTu";
+ sout << "m4QMSxd9SAn9/aRXKR55Im98Fx/m9q2GB0vn8RZqfqHAqjmO6F3usDuUFxk0kn5Hq+8jXBephw4/";
+ sout << "YCOP8eBKnY9V2k2bnQO/9BWhCyuwun8huNz6keaC+qa9PFFwXmAf54BIYdgr+NspegaRej7bOISM";
+ sout << "WpHZ77YuKL04uozXUz6B+0Jy/zDzI9EgLTCk9L1CUpGPSoNGpDfb9VqkOdTvjQphSOPB5PxD7NNC";
+ sout << "tV4tl1nhB46JCCEalhhsip0/ZEtx0Hd+UNxYRA+qjaGiEXDPPDyjbYYHWNRhYKo1SDJLVZG8hoVh";
+ sout << "stl5hebAI8t8rCBUF3x1HrMRdrRX/GEzLfx8xZgk/YAegRw68TfN9V1bY63aRS712i/twY6KDxD0";
+ sout << "V0DeubXqkK+N1ERAn8ygvQ7EJNvGbp0OTu765R4zv8y9u+GqdmpA5E1Ti6+2l/L6008zbYQVWG6z";
+ sout << "ejkRAp4UjMSDTf2zTNjP76zzQQOQwsmwodggF1paT6UlXpb5HHp9osSrdwGXb06FiXf2uVo1rtSh";
+ sout << "Stzfee1i+BebVOqKM38qGjBhXwGiiiZh/mwuG4Fa4wOKlrqaTVEHgGm5N0uxr9by1E78BXj7Nha7";
+ sout << "XuDtqivJf0brsiAUNNtfUjBT5WJrj+iYT16Zs7ds6Fm60mdF7vSBrxXPNw7ZuueSA5xuN2bkeP/m";
+ sout << "CQ/3N3lGgTQlMNkMUzgY37ln3xYCXGcMpQ67ACMf4gf3l6FhI0S+JMwzxD4XMZBTxgt/2YX410Ia";
+ sout << "ZijtokULoOBQRqLjxREcBBhm+tpSSU5cRb071EVhz4K+aSN7PfcFOTMC1mVGy4cibxeRIq5Un1o0";
+ sout << "NYSkHLiVCA4OCLkemWvzxswd9Fduc9mK11xyqBPu688yHH9y2e4Fo5YjiKiZynd/kydWuNsYM9i0";
+ sout << "U9ukHpMFmrDmIPZJMGDgZm0I2QPvoieRcpitQwNYh5x3cvsEs3eAAc4mfxQZUR+v0gClhfu7wSn2";
+ sout << "XBPPNVyzE16RK8q96Z+w1paaFKHR2t4TRGhmeCcxR3vS+jvqpGaYFJ+YJe8hvJYK0IMZTBSPg6Qx";
+ sout << "Cx/cwlr7gStMvrFBKjLGDMAsrw0OyDbIe6/E3Erq43GHsAplYSxYAvOFZUwX0b6VkfAB8CsBnRFb";
+ sout << "jnZmZP+cZPIySLLGJJm/oIs/Hy1wYuyn2XpI/uCuBDZifE6fnNzdpzQ0BpH3qjaIF4OV4gghhruD";
+ sout << "toj7yNaS1FZl8IhQ59Z7P1eSvQNulTHYsfBw6E8RE780AO+aRHAxVtuS08RWGrde4wV3ma1NzMaT";
+ sout << "6fHsCr9IYY1qACpBtyGz389+cnXpeN2VZczNw7JBx3VthteVNdpX7cpftA0e/mazMuKLX0vcBfus";
+ sout << "/p3Vj1SxPevb5pslgkfBTVcFxmEM9Doy5dSKQ32SVFgYwTppEuyviNKuUzQWwO/XhsL50gcuUhyU";
+ sout << "ELpBLIkpABrbGRa9K9ye5qLWYYzzBzlPoyhEwt8cWi4hVnafg+RHEJ7lXXQ5GlEMlzxPJw18LwZs";
+ sout << "jjbC/PE4abuILrfHlOygD1CqQeYzzBN+gwwaTbmquBtZyIMvGr5FrRqz6sRP/W2OvOBN5iVSUa50";
+ sout << "VjKdnNUwSkeLCU14Lp1zN0WQQVVNeR3o1JfJsGvQNjD74MnSlHTFSUYloSFgHbM2ANeKgiYN3uB+";
+ sout << "xLb+hRJPxIwiTshZyclnLmv5mh63OWU+7afdA07BMdmnK77uFB1rdXFJIWUBiUG8u2yqUk3PKVg6";
+ sout << "9rJYvVu3eA7mjBqRvL//koo7VJt6g1Hj+8lnqu71OwsodU/t+W4tmcIKP9S0eafHEQC6hYCu8b5v";
+ sout << "QgVuWf1LnTXwTGbm1ohar6OTxyZV07nJGC0xscQ2yzhxnssaCoAw5xFUzFrgo8a0J1Nj9aOaV40C";
+ sout << "3y5lP6NgiO+3r2J8wUcKWEVFLHDAYMXiMfe3XMp9/ERswAGSkzfXSxZN8T4RPorh+pjPH7sZJExm";
+ sout << "o4ZOdqaWLdLS03+0kRuPXm6FvZCaMY7Hz0e2Zv9X22KYmXluGqcgErFTjYWhQFhcpt8oZA5T3mU5";
+ sout << "NDE3qI+bgtkN9vvNbhIrrOH7mKm0af/zrHA8nnU33sc6Gnv0wfCvolpiyTqla+HU7rwapX0+Ge6j";
+ sout << "SZZht4l8OPe3/mSmwGjX9U8xWxiIhhr9VbVxZZ9UTnAhfcceB3TrDA7F2SxGmfBSRvQ50uRp9Yi4";
+ sout << "14Sasx4kgF6e2wJh7TBRfDMQM6dew3rmIn1p7l8f43AcBe7T3rnWB6UJ5eh14+xb6HwyT260Njvy";
+ sout << "vF8Z4nTq/1MCLv3c4lQoO8yuhFA+J3rWlYTQmc2ilfWbYpdiAOxT7k44QYoHTZDS58anIywl9J8Z";
+ sout << "uBYa+giIPDINPDLGNobNEPl3kerfygAFfVkxd33eO9LwDAMGAZmbywt/OwRmraCFhT8Qz9jygFse";
+ sout << "mT1tZ7+hF2rfTvyF4OiTz2nutfzVvYQ45DLjmbHFQsHMVGzU6xdLQlZYWq+h5tfVng4zhwIge8tB";
+ sout << "mA9ndgrTZx5tspm45fVKTjE3MYL5qNsmzghyfasarz1MOD/2wml6uzWZjtljHIzap5gtIchaIF3f";
+ sout << "JMOvFu8Y4R0Fu2UNkbOfy/YLRnKlUHEChwTeEavSkaHBkprvRZJuMsIWcvEZHDIWdRte0qfNm9V9";
+ sout << "zWmj4B3RchqvXh5XsoDkMqergDisI4gCiHFIxzzfJBsX8TQDUQIvAGk0pc3ib1/6xwCvLs+NXC+8";
+ sout << "0sI9e/aruBe9uEXGya5n6Vqy8z9soOoBCcFqc4B+1ZUOyN2zpIxVFd9/20NKmp1W8RjRforBvF7F";
+ sout << "2E4IkWh/3RcL8tNyPpWkAA7Snlm8oO8L5cc+lMx0ph7DzYX5NcIc6V7otI+Bd7w8xLSVxzfX3Awu";
+ sout << "S6bjrU2af39KbPFq2LpGOakR+1gAh15sd6WYscozdkSZu7kGpuEppnuMxYFmbMrCxtw2lDt4IwMY";
+ sout << "ttBHEP2XwF5z6DQpRqsWBMcuBbztZ9BNa3y2aR8pASldncrnRjYQ1Iq6KGrHeau/uq80ui3VPbOD";
+ sout << "IrV/JeNFGet12rX0zneHBGYUxG+Pbso07l1fJxd6OgaIrwVksovd1psBmD6qYsbc3V3dJLacNOdt";
+ sout << "fvFnHWz6EDWivzMDWvumzAO1JPVFB1TebaaWaINSxaxS3h3IKUVPTu+2Ytw4Plwt6fc/LakedkFH";
+ sout << "fxE4uN3LR8Y1bKQN+/VvTHXGUtf+UyZx/VWHaxuiqoxXUazpT+l2muie+/2l6v8JUCBTUYwa+2W7";
+ sout << "WWvrBxyliD2V+KaN4dybWey+3RIp68UgV0gP8h579kezx0+Q2Ku60vjCIKJ4vJB8Gt06a15lHOmx";
+ sout << "WcJq2DmQuyWTXsqaJPuljuHUhqYWMPnqY9gi53ddw9SwcpgE8cCC0cG65FKn23C1Ihh5NnuBohYp";
+ sout << "ly/QVOFi/rGW9kKrF9FW7C93p2DxgBhygKC5E582EIjRXm15bQ5bUSwxfYfQ8VDCPlL1cR4yhpqf";
+ sout << "CpAd5/ls0MJ3SFgUS4e96011A5intr3a3UMPqdMKWrwCW6cP+7TJKC22+LwvAvyS2dI+NdyLt0qY";
+ sout << "o2t9/ml4u0bbBjwl24o/CIvm7T+045fz+3h4yrKYYPSbHPgGiN0+oTcGhtjBgtil6WCBMuzgEn4Y";
+ sout << "zVaTD4gQV2l6Cnpi6fi3ZIOwuEi8aqfRdNrPc8+Rtik2wWdmbYZ66d52HweeALKwfzIm1DESb/Dd";
+ sout << "qc+pz+yVNmYp5fffsjf70WrDyGWMDqSVvj3tXMkxKiKRetutfs0Tf3vGDdUEdTmtw4rEca/1K8p1";
+ sout << "eNpotuLSio7xp4/gz4cmq3IUEXzk7b++TAs6GAq5ddjbcTJoyRSnOhu2oetJGzWJSglrWwGpAAoC";
+ sout << "l5jHFfMXIyi4lggUHjIcpEosLLxntEai5cP9bcGzTw3bnnQMo2mI8bCKSrw3uzKapI6MdDJ7fXZD";
+ sout << "aR+zuSlVjO98VU+BugUHWJcrHneh7tRZ5ijux8t2d6y/iGeJm0gs2C6ukA9PTwSV7UxnmVOCsGqp";
+ sout << "kh37fINlLQWIjS+fVQZwYcm5XyzPzsyHa/x1DZ9CJgSaLrOCm8PDPk2BHFSdoYMrUGn9CSL6zRNN";
+ sout << "TL8KprKSZLrhrk4wtgRrTAkO9twMovO8Pjq/xgO6djfJygflU/Yn3VPYjg31O0SQLXGQLTL2SMcf";
+ sout << "NlIqmLEov/m6A5cs5pJF7be1yX0XneDEa7arl7v4tcvMw9Ot57OytQUTbFu2iUb6zviTVWMEMJST";
+ sout << "eLj7c/+RNy+lhBjlxG756YIjtWULrrdPBkie+AYHMSJ7PnJ/QAl3D126W7D9ubCjagGVZ2cLX/ft";
+ sout << "/pLcIuPRcRUeL/jTGshsfijbq8us89JPjLS08HMiaaPhTtSOKbyqHwVEvfu++thQZsu/3wM8f2a8";
+ sout << "CnKdaRAkLethTUyv2xNyQmx36Qg3LVGf5W/tzC9U2AQ1XRE5l/IoL1AJ08JdQDqHSAEAiw0LWMIW";
+ sout << "a2alrr54YOp0qiPcRTLKo0gZLr6Sy4jKmQqEFrPCxqPSGMe2pc/pzVuxlYhW9fDMjE1vvfzmWnhU";
+ sout << "SSTXpxONjtMDtFWol0pYyjOLyJcATXCrOK3PXPX50e+S2Xjl1iJezlgtdqWVR6mK8yJ63GyAtjzT";
+ sout << "oBIBQjydPb9tpuVY3Qy129f/Pnw/MT/6wggYCZDuhhNmOqmSRbm3runLfwSX3G8XbN8LEw94wy+n";
+ sout << "yMs17FWzTwWk2fMEIV88TuqsxDPW0Ko+a+OuiOQ+7TGVdOLxtqmUDW/3n5QA6Ryc2kbaFhZAhONU";
+ sout << "zJXyJzll6UPZfUsmAiX3u/qyTWlbB3EKngamgmD/1WNGX6ysD3RYPu2olopo9wOqihwweOtxfQYr";
+ sout << "QdXhUKreENZAZ/lOjn0tSfAFsB21/MuDNpuz37dI0ACfvKyPdGnFpNocIXHWmSWZbOB6OypSbzMP";
+ sout << "5r1fNPRk9qMY01lKWRz+lkFGzl1vbfKfSO17q073cCPouHosT+RpE5HBBf6E0yG84m5o3u1bLUBb";
+ sout << "8V4YyMREOIa/2wkUH99l3YksFc3eeUMzHSIoHrvCQq1CTyaed3tNbZw0rHPLzwSSgQMZAhsi5mxp";
+ sout << "ZnMABMsXSHdvgBRkVn6PH5rjrKAcSnikddL8F9/FXvJR21nKfo78g6RULWdlcc8tPQR0E+PyOoLW";
+ sout << "VR3yw7L8Ddl1zHu7hoerz+cM4gW+67kjehlzV+BAA70h43CfxsmvZm0Kaw9GOkxjDs8F5b5XRtdN";
+ sout << "gxh9Y5o/vlxI8f1w388jL7od20NPAYSqvONaJjF4nthDN+CqlNWjy3K44Xeghk1o4smFlbJM1ZdM";
+ sout << "taj+ZlPozWzYEugvDe7/9woqdFtYqCWDlIAcMGqw6QGFGE9EVwP25LeGWnM4bcSWkdP/Fc+Bpssq";
+ sout << "QaRpCt/+Igw9C26fM3zSM3y+RfDs6dseYokawF7xp7oo++opFuMO18Flo9dJw9HeXMZ6R8lXrNrD";
+ sout << "l1RWaWakmYp2KgPJYT9USAV6S0JI0m9MxL0R0t7XT23cWgtQsG1m/gLb7HT+CFNm4SYnWMGpza7N";
+ sout << "4w6hXTItZLuRrC3pH6AF/m8MuwdlKJEkTn0Mmu0e+MVe/DJmnA2w46oxML03wNy4qZMj0LywOnIu";
+ sout << "Ip8p1oLIviccDGpnXqZORZGVMqzuidIfNBSgalxT1A+ySEAVroXtGjtW/L7sedcTElNyK+aspa3l";
+ sout << "O10XSF3gjgcZnN7W1j3wiqePmtzL/RlZj81ruAoYJRWaNUDT28p67/k75Uhl4Sh7qWwc/mJnG9qm";
+ sout << "rvZSX5GiW11EJs35JJ08G3UHAUa98VEIOesPMaL1Wr0CVHFVC9eW2iHwEq6vk3JrGfsKARrZgg9B";
+ sout << "YDhkyO+3c+qNgTk1WMC3MXaSZg+mwqwx0TGeJRg6/+rcKr0Y+y0ZEDLz3kMD/aihZ0ictCZlcYAz";
+ sout << "8pqdirBd1Dbi8AaMCJTWHGWaOIxmrdU2XyVWVk5Vpc0rmzyc3ya8WfXjkdDzXKUS1IIFi34b3/7G";
+ sout << "qYpJNqtVAPT1TXnnM+2koRgGaCARu/RoKzl169YpDYPHVJuRJDjy6lfrKNEqPzJY9phbh6WhmZ4X";
+ sout << "CZMPrgnrs+0Bs///61zNUJA3fArNyessRIkZqSxnKjVOHz6/LUB7/UzJXAd/XfMYi4yNw72BSAjJ";
+ sout << "Z2AdnjGIO9p0E2KIOVx39bETt8l8JKOyT4JbimwFT0IN1AcvwH4wgwtKYAJ29L0kSNoBmMwaKCAj";
+ sout << "1I1b8usj6EM/OUjn+e9xmLMkeAUo8HM9BvLP33TuKJwLINhNcggbZVeaFSwBsvj6hEQ8dN06eLJ/";
+ sout << "yIGaRsni2n6oXeRUo8/TVWmNIbloEE7mQUuttR1pVBdkbJsHGWm/S5hwEmsKKOpvQo44Rao7fsDK";
+ sout << "cCtKATplclwuCDCPIogRMkpHRSSrHUKE4tzdA7rWWj/jaYc1Sw6hG7OaDFSWuP09Qq7klmC3F+0U";
+ sout << "CIAtQbKJevYhCSdWHfo4THHFR5CeuA+shfbdWyPNmP1HRA3pvo52jSsJi8HtPBp6XmGmM8dN8Vaz";
+ sout << "pG5d28HH76fj2Ny9McHrxGFaV/WhhoST0GDqN4PnqV1X6d05aBKfwRbqdWo2XvmrC2YNI7Ou6+3M";
+ sout << "Ovp0NH7MHXpLY0B+GMf+x2X8hoXkffnGCa5CQ1SwTbZRkeGjejwGhpOybsOl7AaSA/fqymEW9JMF";
+ sout << "1E9XEtZtzyATkj0cBUlofwC6N3N//altqBAbmDKrHThV3SDsvn+tpPiZG4+pQQOWqQomy3QAWwVV";
+ sout << "nZTorQIvoPMUdJWPwsVLaRldOrUEjmK0OF36+QysxIJLgj9x5uQDymixyS0Pvu6ybdB5Z+ggH+hs";
+ sout << "A4wt9pW2wbaOGU6jjYN42C/ehSOClqGP9JE1qejqcLRAXm+JDDFZGmCBfZRw7W6R39cNfrbbACVF";
+ sout << "de9yicnqvNbpaeAWAmfjrwg0QvVVmmlvsgDinO2OdZ+TLx0KKj+4+Zrt0fFfXQgUa2oIFGoeCuQv";
+ sout << "eyjv6ZUnkYHtJLeMDfuhaxaV6dIG7US6tdGcxKxw0/Z3Cl1AqyDSOSeZIfABudI/tNrQF3bk/x3g";
+ sout << "Kb/i4VNKLwEq6O1T8HrZSJzTWus7XcQncGAOJWTsuC0/0Cv/Eg7pY87c6VtF2/kltcQeH0qWNix8";
+ sout << "qHpkEC5Nmjm2erJhQ2gsdVQFrVjxv+irSaO7GLhuVwFgj0pMzKDQTFvd49DRMyrwb9cml+u9MZgs";
+ sout << "Uqwmc0zXzCCT51uF3LMqVbeDEgt/Q59+03U1N2QAJXtTScjJO97PYZSJvMEPONjSI5AeRlRqpD23";
+ sout << "4O5xrQGkzb31WrZP54ZyZ7Gt4T4bxdwVIZSrgSrGJt8RjREWl6LuDpuRsPgnt85EKNRjf+icgibb";
+ sout << "sGxnWWdqFdOLHdPKDyeCTiJb/5dc9YvjVxWINlgOMxdYpUU1OZ1hX3OmDDepo9HP5OWA4GnGXR8Q";
+ sout << "cNS85y2Q+DS7RF+qxSbIO7hOcNL2PyFxScBXYyrkLUWWxzfc0BnyCaCg4lrMr6uH8vnM+7u16IsY";
+ sout << "0f/y4nxYNholE4Q9JlbhXVTBL/ZZaC4qU+/mXrEBkUVY/xj8NqYSicBJpv2c0IYwtf6hwOFdMm8H";
+ sout << "y+l26vbWkXEGS9XV/dKKKLbW8EqhKxtlRGB+Tle1HnOp7HhcINVwH97RL9vS8EurSNlSWJCHc/1P";
+ sout << "Ui+ZsmkNIQtV6tATb57g/AaResCtNWa7znBxzJUW4stpxdxjD7fLZxyP1dSAlXDluOaJ3vaUpdnt";
+ sout << "JqkG/gHcvI/0DK84ddN4VLKIZ0XTrCxvTQJLJcUhA0ENgzdIPQj96CN1XvJ/DdFxx4YKBnb5qyA3";
+ sout << "51z6P84v5wm+ciOVpTFERffXwtPPfro5z7/ZWazbUjB4AcOFxj5Sk4EweqTrVSD2Ujv3quBT8SUj";
+ sout << "A8Mc7LBMtANusOZeMURs5vqoI5O5hwicxsnqHJeGe7FMRRVH9lrM/+3W5dfm+jJ/ntoZJArHCrWQ";
+ sout << "wJqFWJksHTraWTJ8crxkGSgZzFhSHv5pkz/FfVPRW91XprikZvTlVhgyTFbB5gVGeEu1X+M1m+29";
+ sout << "8Gqo/AYNTsXS6KfOWnfc+jfQRkkjiLsBS2vsnV255hPBz7Fvr0mqaN6AawiESUSXcFBnfzWQbOdN";
+ sout << "wzA468JTsVoOz1MUXZ71hOcXpGMVdKN5ABH+8imVfDYGVUUTXv1e1Z41jrzXVphOQcQA+bJYc3mP";
+ sout << "K0HVQQfyuwo73PTJ1pINwbi4OrAJvTP1vEuo9xMtyc2d9zIN5lVg0gTlEE7uwGWVctS67onu5vd9";
+ sout << "cQ4/+1axVD2yy/Z+s9JGRZ78DA8rIUqVhOTWNlqmqquxf/mh+nY9Q3PdHfcpNoqc4g465Xe3CvO1";
+ sout << "cBGJT0vnZw1vV7rtQ2FqAB1LPAp0f8nIT3VExu/mABWauAImnDE7ZiEnOuiuq2vdS8WspLMjktCg";
+ sout << "vlvwF+7AGxfwDCImKSzh4Ph5AA1VuYW3sVQ5I3XuONfappCoJjZwiV8jyKC91rHY1/jP4eMb5vwy";
+ sout << "8T5ciRyzv977mW5MK5sx3ujnlVFWr8y8VDhp4qmhnvvswLIKBRbrmW7jkG8Jg1YB7mjY0rP0sVJl";
+ sout << "zICdb33DVPWxgRHDjyO189HiKWkzbH7JTQfKOBQwufTNQ4v94HlyUYNTzHg9S6nqI7LiBq8zO784";
+ sout << "hGYDRvfB37TziuwR5oHeQ0IiV3x79V0+qLUHMY7ZFpQB+LnGcxG41kvposK0UBUCPDLKsLNQDSn3";
+ sout << "ltl6K1uTz/4gjdLjNVlkpO05xJXjK4JEoG+siPuABINp/7iLAgYGHq2G9p8KmKiu2CLtN/g5Aa37";
+ sout << "pewz9BkOuvCHm+J9/V8+rMHTCxD3QT962/XOO+iguzglwO+YlN3BpWTnr0VhpQN4ZZZcZcucEze4";
+ sout << "PgoLS6CcGSP6xrMdnasvqM1kFCvgOn2ROITQEJzaakPDaB7fzVIw6WyusM85BSNnyc8XT2RkDJaf";
+ sout << "VTGEMba9qIAi36u+pC/PH6XTEW+x6nehFJu8q8e7B+VrnN9lkpvVHVDxjci3CHvG1QLPGl1GaGYJ";
+ sout << "oLeINwB1VTKtRtEyKENgc0MS2FMk2G81MoKGKVKId2oddu9ACvHqzQMbOqnzp0l7RJewHez830/f";
+ sout << "8oKpsUvl1N/b9wfTXJwN77MF/n2zAg3VSv4HInhkdGA9HRSg1JylghqBAOrqpRsh6r/TG6ByJ1uQ";
+ sout << "rv0aYvVdVaHQ0kHH+ma5Mlc73U0x+ncDjlicbrIca7x6y/b90emfyRcN0uEKr0fZWtcbK0+1XJXX";
+ sout << "WWMGGDUO5b5cJOpDzW7GJe6i1m6yIPPlfp4AWLa4M6F/ys1ONLrbXqjYCFJY4y62OeRK32Ff2zN2";
+ sout << "/Lw3hMt6iPTm7oiJOJjDcamybQlWOoYIv8tkEQYN2nCX7k7DD6z183yX7KtJF8Sj95nOeYpzAnLG";
+ sout << "YOfY00SA1cM4aLRUPG+FYrzn8UFF8Bl9UFrLXoJ/IxXOWKqrbjhsKxMvC4xriww6KZqHx0L5FWGO";
+ sout << "lcFkhdpx2fFeswyfjQwtyqsGsiY7MXGsWA7fA1TOv+FZhJbgCixrx1rxwJaNel8uMo6pGHsZt1N5";
+ sout << "Cr4CkiA5ZZ0ITa63oOGM0tD30yaSLkSDZahPT7xo5s33HNAgXNxZmYnv8M6p421yuNPraabZyJ5P";
+ sout << "3FtNwzPpw7Q4BPSEBoKc/UbY+mdzm8ES0nKPI6Oeg1tzc9NgP5JaNVgWymA408iLfTjxaxS7FqOw";
+ sout << "BCYBbz0EdJigWmF73di27QmfsygTWbHrfdqxCafzsOPuk7l+W26tfJTVxP7S5snKfhbqVNrV/4M3";
+ sout << "uhpZthruy0mSHtBx5CuXAtGNQoesk06k22scTmX89BgNsYJAshikS3IHg+LMMfaxBe8KQaaySUX9";
+ sout << "j/ULsdT1YZKTeV3NVn/Z01BxF/ZPvV33Dbj4bjEcf2N9byWwicz2CFpkFyQlXaSPPSzMYYtSPCZH";
+ sout << "QqXL7ZKh1Xkgh68h5WyuoKkzDXGuZcl0bGVWXSOfO6gK1Mz22to6vJ3lfssNBUV1mxA7Yos1bUb+";
+ sout << "tGBxyzoBmphWy8dFCdE5vN/WS+w7VV6O/JQ6j8qbpBn596kNhL4iaK9Lzh0yXRR85nqJbi8bkIZZ";
+ sout << "FWskXD/A31jO2IUZKCdB/iqtIuEFZQnq4LLgCTlwF3GbxPeZTlJ1yKcy/2e7nIHZ7xHeG5tzyqjP";
+ sout << "oi2AAttJbrL9n4Ebk20aqzYxdYUkR8DcflVhCjPDfeczGwzwJxwznHHhEV/OuxZpWYRvTQDOuUzJ";
+ sout << "Qpu+291rSMqntqfrUo7uxCGJLmyRwWqfesAluH9y3GQco+V9yYzqf7ekdNpuygq+htLNc2lG6deM";
+ sout << "02oxJHjYyjDlG48zlMEFy9OhLLLPvFVmQWVL/D4FwAwKSQfYWmkUbgqgB0QwHBFSA+xqZBWZbzoC";
+ sout << "3xLt42o0hhdlZkXEQtGhtaqMfU4rBbKWxKx1gKhHYk/Gzba+PSi3J0e1A0fC4chtUfWc9UTZT0HO";
+ sout << "9QLCXzuC90KrYTYOeaPSoxfOm2aboXBWQfyxZGX9fKRdLgfJ9+8EHw7rD2TUTJueSdfvVahIRi+t";
+ sout << "eiEAG3CWoToa3S7i3tpR+YczkNVTl9DCuwD/QMAuPo2mul4bdVQt2NVnoz7PKMWim/JYSFPlJBXP";
+ sout << "RaKg8manuhMoDfHTVx3FcToGnIDBd2in/rfbEjE46d+98FRs8p/zhmyQ6/+xw34DKX/NrnU42qli";
+ sout << "1+erGDbJFhLmIoU3OhYTiABG5JVefsXQFgIL4kSF+Scratkn+s1aW28PLoooaClnGF14imxCA1Dc";
+ sout << "Y/O0HqBFR5lAn+57fsKP195CJBvmyPR+UE8pBE818w09TvBmN6onwBCFtsrEnzTmstINfz0TX5lP";
+ sout << "7eOD6+XiZM3y+5YBEK140cgAUae91wD0Hjz8chAlXmgE8xZfalAjHnS1c7ROuPxV9zhj5AjnXV/Q";
+ sout << "7Bllgaqoyl3q8Sc6eXgEpJMz8fyZ+LfMkKkMBycdjKG0AZAkF5902y9b1VPVIhgIHueHDt5irhQE";
+ sout << "2slLN6pSkRg/3FxPb97BOt0/zt96EvehPVGoKs7OlOd5C6tQLWio1Eh9IL67ldEsXOiwaSMw8QLs";
+ sout << "wAw3Vx1VfWnDCReIT7/yxZGHmEGhjdtqs+gGPtZ5jkIp0l4QPQwLhVL1tC4RSWlt8BR2Nq0Ivon0";
+ sout << "E4t8jhH5zWeLYZxLIVkyW0SKj5xi7s4Xo2XOZL95m43Kv2zrxzNngy1oPOXTAEUaEwuRKmo90B5k";
+ sout << "xdxVS3x9BarxlfUKwNBvwQ8OVmJu6VPWQ5hx++9ZLDfwK63V0gH0tIU69q3diMOjXv+fmvQe28cx";
+ sout << "iUh9o3Em/w+7P3DTZu35eSPUD6yzP67VH3pDvahh4uLzes3bdTYmpo39kD+gFc7ADodPWp2E4787";
+ sout << "JZaS2A+efTMt/OPen2TV+wMSSK80nbAta3VE1KSQogWp2bFPi0NXqC6GZ+bHV1DVnw5YGs28V5Zd";
+ sout << "RuuIzTLauK958DBeAc57QbcAun5sEsPtg+CICI1pcOGhv8hdLzT+YKcuUhji95EAYzCj41nxMM1b";
+ sout << "wP1rH0TG7jaHvET8IY4dQb5VvzNXVne0FExHIyIFGIUWoyQgjROX+cHDLe39tvjZo2xs27/CAhZH";
+ sout << "dsrgamGD6wj8ZMH+8gHzu6b1yy2crBAw1Ot2JdNhW0kp4DNdFgjqMcZVCfDM8qRZ71DBO82U9QU9";
+ sout << "hXh0DCIAyryS/6W9mY2I4IxPrbSL4RkmKzhepcIoui74zu3p+lKY9LgUxYbGNOK46S2olWwJYf7Y";
+ sout << "qkOPWcN6yen8rO/9Wkvmxl06qSDDhcYdsc5S409X/VeWz67rJ2vNDiyTKjUJaDWcSU/znMnhtosm";
+ sout << "QT5LyX0ZVd++zQvXfjSjBLkJrki5vvKMZSUKhiYL3mIapZpcnsOQ1WRwnmSEave/MidONmZEP4rH";
+ sout << "OgS00fnCK7eAWXsZb/rk7tq0cjAFsNeJsHe53E2hJK4TyaYcUi1IM7ZNSQ1yqMYbcFn5bEHBpuxS";
+ sout << "+TUy1QppziHZ9xoLn8nOxRstSGxrhBG3WjfMhOwR6JNOe4Y1RDfxVHYeo2BAmtKUutztKsf04UNy";
+ sout << "XTP5HSaKx1tZVMDn2fkIj6W+s07imc8sg68Sd1lwJx09bG9ggOw6D0eouy/IeJi9YQAyZT2BqBQ7";
+ sout << "2D5CrrD6hqhVF4IVhBPCkJ8+9hbFs5EA8XhItGVBPyXg4/alIJq2ZeBsjhK+tuKMunwOl13sxzn7";
+ sout << "iSO4+wO3geQ3+SzKShzywLk+uGm2BdS6teWxatVdm5hKNDirghiEzqdHdwGSoZUlLMYp3KTdzpMg";
+ sout << "eAAPELcLl78jObmQkun0ECGaV0paIRhytIRpV3yJGmrRIxWNI5WrqLtocN7hnm4fTrzdlSGnDojf";
+ sout << "CBmISIwp60UVjJijLDTVYBIP3VcwaoT9V2F2HkDZ7ifQUTvHmxRDFHuquws0bNRjLSBSPa4jh6th";
+ sout << "6h62qlgbHQ/BBLIUXi/I12CwYwlg3ute/qQJIpTCsRxf4rgtEjvV2dG9ltlo8PPMhOd9fkqQcvua";
+ sout << "BJaZYLYw9rxq4U4TY/IxFQe8MGJ6It9BOFHD83v3awKPKP84inEWXAkFyI0YYmzFjW/gofECo69R";
+ sout << "Wk4/2wv+K5MFEEIzBUGsyrjz0TmiKvzEJ08KZOuWtFXeF2LUGvYeL3TO8FyLRpPnCAoErjZzVAhI";
+ sout << "9Sd3ukmv2oZ5oYk52HrUQZ9WywqVgFL8r1KKEouCd3iNhmb+2qUSh8VcvAncWTQbn8yjpGe+kwUf";
+ sout << "HssRskQ0nKLIvCz6JwWEjoaglBC3PiJ0EOwDLNP1fKXqPtcPglnu6aGaTrb/lOTCX70yvk2XpdPE";
+ sout << "HzsxZHKsV2LxqDI/IT8yIxvM67v8b9CSGOv64a8MSBZ5zYOQR0hunA/iYWfjzuvX5dgW90GnM5ZR";
+ sout << "VWoq9zxcBEr+Nk0HlWHpPm3o08OPGmfx/ndMbLLtuH85q4/7Kelm3NSXLXfOxjkRuSZkcAeiRk8e";
+ sout << "64X1nXADtf6z1Uaj8H0JfREe91GPSUfyZjwI0pQLDnnsgGoG0DGn01qshIL3ANXsLTHDsdgjaFGP";
+ sout << "zZ2f/7V/NKLrJ6HrVpWoQiwhNGY5OYv/KXa2q2XMGybUaoONCdnjGKzPYyXA6C0utQNy7JTKcbgV";
+ sout << "/71QilRGXbV8OPBFLLN1VNHV2rQJDSH6UL4RYDdhRSsItpsl2tMJGGS2RLH/FwEv0jDo9e/218Fd";
+ sout << "0P3DIy3wAevcgLQu6Ex2SFlTpZcvO+yidCgCv56rJHlVxgN7eSbcKS/l7CiAcafTsjsNm5VtgTx0";
+ sout << "CM3ehbIeuPrV5GqbZPgbF1akOkP9vzeNsGGW5kep471Q+xn5o78MZBz7YsMT3IAHd6RViNn/Rrbj";
+ sout << "gLWyleDnUQ13gY1xNXl1SD8xaQVvWvvpmlZKyB7uIPEL2VrDZSxmFYyhdhK64yb4Nvvawge8jIWM";
+ sout << "a43Sgazodlobk0CkFktZz10gZEUobBos3idIWk0hQP+nGOLhiGO//fwpiplh+g6NBMLj6vI3GoHZ";
+ sout << "MKoqqYh4LPDaEtnvILpMCP5NJoBbxNLrUALf17ObB5Kgz9YQsxRIhtrXoyK0XdU98ws4zxDGn5Od";
+ sout << "Mj4Zpw/QpI2cCvc6fiue8+l/6BsFse0JL8pvOZDYhy73kaVarhJhQgoZabwnyATtN5VltM8niedj";
+ sout << "reSbVYi75bWEdYZrO1PESHmhr7lx+s4gBJQd7kz8wbRADk4dJBf8QDccgMO2yxf6biY6ezjB9GtZ";
+ sout << "DnGrIuG867zq9fMbooU1PUrXEMYkeeccI3d3ClrF11AiOxFAh/tSSj05pwl7UWVKDXJrP/T9sl37";
+ sout << "qW5Uy21+/Mi+kNMCaunB+fgJHsjvoQpwSmEb9k7b35H04v/qe1deEKPua/xXlawyTt4vv9OFwLFV";
+ sout << "ovcegUF44BJCRF7zlZnLMzyveCE2Uizux/z5zXOW21bFEaODnvc9N8oTl7XkQAyDT67JrBC7WbH3";
+ sout << "g8lzwSjSyC4paGxi87EifET4QFZ/wdhP/mP8lZXzkq8xieb2ad9dSCHS1T36XB40s2QmiSu/dXNN";
+ sout << "yaGFiWtFCKYhie16fkeBuCuzcW98RXOUQYBVCbSaDVM7nNTXU5+2B5NfM6ts6+7fO+ovsJh74pGx";
+ sout << "HrPXVdWPNb1Y0v7YhX7fLu77zpFbr5Mo1tdL3H1P8k/y+ep5/vsBE+git78nNPeoZajrbKuf3QtR";
+ sout << "uKx/sDQrnt3opQr2k0BEt2HZOPeDw0HuPJnpUNg0u31w+xWZN5YsTO6HBpfg9x5hsYX9YybRFoWJ";
+ sout << "bpfcNHrNzlc940Pbn7cy/XKiIsegvY8JKScIsmP0pnv/3rr3M+FDocBoe9QDnj7a044KpD2ghnwl";
+ sout << "4uJjqFHv7RoGkY4365yAFnD/vm7tEvO09ibX74jzreb5lcSu/f0hy3IA2IbFEYiY1MBL8MPDBh6h";
+ sout << "dKL6Jf+UHWrz7MM2bSfDzf467cr65Pt+N3bmX/BR5yd5sPyaiqQhEewGImg/bOiaqpRnIKIFNrF1";
+ sout << "dHziLHvI5n/RvsyKPc1ScVFFHdGs9VMXR3ODzG4Q4wNyGYm8Bt9ciTw+Cd1VuAODX8fjGFhCRq5m";
+ sout << "wlIDo6rwxR8ME68IXNMqztq1JPQU1VqEXWGAOBIewAYPLOepF4PR28SUgEGqn32oSG6hutM8UAK/";
+ sout << "4LEsqVBnc1XrCczlpF2vad4Oux/Qd27JfspaVThUqSc2x0kY1Ca8Keggn3j0s5GCSXxzsQh0CuYq";
+ sout << "jSJjKBVxbOP7XU9EHt6w4/9SOnEmd2QVJ26x5o/znpYpW1zTwrIG1ZhvU5AhBtzFtlLEOs1WzBH9";
+ sout << "c2jFEPc221taxwBmLaOqa22VULgiW0zJ0nFkXZ4vfoJghqWOK9N9ux14hpKyWUtPPwPHT+oqctBf";
+ sout << "RzNzQ/ODTSIkbFdJzskMR10TAYT3zsY27dWLgdAnmwXE1xr4SCbilLOEZcsJJDEX+XkRUsMbxKxp";
+ sout << "Cr/IrdTuAzx5MogSWKsUj0W3cT+EG8Nv+iHw/q5PIag2y98b99gk73KWnswo+QXx6nTCwuSuON2s";
+ sout << "KlVN2mvKk/t6dNJCbLNQoEnIxoHumQimOrsmgtD5rTXK2fR8rZlnBzUG5Bp/45YBEK2a4JMWivLu";
+ sout << "nY+zNxxg0wslFV8OJaH3yfssBMr7lzi3jX0D1dCD86mSslQZ9hAx/pkgNPogz8KDbmoQjozc05Dr";
+ sout << "nUKPCCTYq7OUxpGXu9JAFsQsbqnWIkqyxAg3+s4M2cc9+P+dFpb99cQAHDIn0LSBR57f6o8lojFg";
+ sout << "nPtdvUF4gNJd1EVqy2ERrWmwRQ8gnzLHVI6wJYlJVUa7p6gk2yZBz4yRak1JJUgxp3l1goGG/0Ci";
+ sout << "PsE78UDJQW+3FmtOwLuphs51+Clq65O/n5RIeLQpQ89mh1govqglh7o7nHyyulhQTrEpBEvy2GWh";
+ sout << "LhFNp5FWc1ZV/yl4SwQEIbSpGk/MTmlPM2vZcOwTGNIxHia1PISF2LNDMDMmqQ0N8y/NlaKha2T/";
+ sout << "ZfRhOQdR9nOgI0svhZQtke/ExHkpYNHsF4bbGisnGyzmtrKgr47s3l527s8lP8Pxe2g5cgX1focZ";
+ sout << "aeI+CIeKPFynBjP5GmbCoBpOBpFfOgPrMU5cPMvBktXq27wF0eKbZ1pi3WvJo5Yt5SNlRbs8cX6V";
+ sout << "dM0WDcqw39ff+2ztBdDR+aeFRmHelayiPJ6Rmd3dWgH5Zk/vIGEaSsc7/KfYuJQXkca+gR1SVE67";
+ sout << "wez78yJbRDcoXiWhFOC4ilWR1DDKgTy7ej+qsbEtgEQcAqLukxeAnICNaIYzrQsPacqSvE6MLBiN";
+ sout << "DcoQ5TnAjtFPGGkkhYH8TwD4kTMQgSDXt5yqADxbAMLu+XsmgOJzyAL597+yMJm0w/COAOvVYPRw";
+ sout << "kgsDb3IXuTpW+bL/xGC9RvOefz1UoxpwP7znW1IYeltHBqT9JLiGLMQSRTl2x+OudBurZ66LiqQf";
+ sout << "TnX+HXrN7MnTdshMCQ339YwnY6b1baYKnxVwNdIUcLnpyVqMCKZNu92bPJsdj4af5nZuIP8HMwoR";
+ sout << "g29BOeXRb8UNtWd/EMGiuHiNwMMhpDBN/L4H1f6GS6ZeNobN9k613HrCeJmlXQDBWCHE66vVsAhX";
+ sout << "X/Qwi0w5PCLwwpoIsgMDnZDLa9ZCyfBez3VP48ur6Qs0llb1j5y2gHz24yGH/fVlWPdnc0pbKvFR";
+ sout << "KKW9jVKicLU9NGaOq5vLvBmFMnx2YLwhGMIlOgnnDLUkB7wCY3ADrQau1lPLfQ4QHr919kbUpCmq";
+ sout << "3syJxq1j5248HvzVQRCQV9H35RIQGwd0fjwtK8Y60fxa6W0tD7B/hf1b9lrodaHQL4gYt9/lrCQU";
+ sout << "LezUF2Y3HN9LxAWQNfoCbYaz9HebHTfCM+ynCUXAXabGngBLCat1GxbYaWlbotAXawMKxRplVYnB";
+ sout << "mORrYoF3NaBlDPznNhgiU0Uz6t7Nh03r3wR8NMho/VNPv4XggWVXyyoNvqY1NDZVtHEPVaSlVq12";
+ sout << "yUOiQ2qEpATWET8P+19RL1KAtCoRzPcuY2Ikir9MR8sTbog1FWyTqZN42P6Bx+darDn7eQTRmaHv";
+ sout << "DrC97SY8szr6hZEA2ei3vM0JpNWKYTOHo7pTlaDWt8gEDt/oQFPUSk7WJmJJ3DeXCGOdnRABlCXI";
+ sout << "NXDZx79pTuebQNZWIysOeisk2h2TXVvghE7J7QyqIYh80zqZn6nuAy05mtUT5igW4twr21r/X+W+";
+ sout << "Eu+dnXFU/Cw04SFh4MVLMaZ8kCerQ21GQn0NSgFCPd/bfeJyonCdXaO4rVATPw8tgbsI93po//q3";
+ sout << "Ps0KQWV//ZPdqERqZJYkZKZm7CNJRC9mcLsPAfnE8PRF191kr/A7BjLJu4UgxcGZlyQbi/uOJFd2";
+ sout << "mErOsNiJIC7MuRTh3SF5baHXsjiVg+l9m9lBGXmaSHAHBsKpGqpFHF2UuJnkWihfK1QoNhEkUhBV";
+ sout << "QOI63RPKJlgU7h7hi8fEzxzDbXbpWhsMwcKQ+j3UBSBzUGbRYznWar5vHxf8GQ1LklLRr+8jBJwZ";
+ sout << "fIPGjtKXXyMNJACNv0iulszYy3XUM56cIdLXqjVgEu41sMUfJGvbi3aP0tX6wl2GO5gptrX40j6d";
+ sout << "Qys2Evu2rH26dj/oCWFOqK0EKcH/Nfj/+EY+gm1GI6rSTgKq8RtD5J9fEe1nro4Sz+B9J230FWd9";
+ sout << "G89GSwEnpPOE6hRzubd/NyFZqe9sxVIFdNqRlOsPbXC/gJw7Gx9B83I5j1dpYAsqWyBKhalrGRz4";
+ sout << "UMLThFH6rJkGO0YWO2FF3Y59cWk3rS7taKomqL37407f3VWOtRH3YNhvxXI/6m0LV6czsmFHrSp1";
+ sout << "lkpYyq/UlH4kL0c/m/P2w8JrXwD+1SwgMtMjuwIQsETng1TeEgdyRlkRnmKXVFDfSLQSppfq3U4Y";
+ sout << "zrlnUiKPk511ZcXkg7TOkHxIMUpkhlSihUaBPwT02QpbcJR73i50BzQNDVQbQfMrXny8LEv2D331";
+ sout << "mtjKrWCWThul7ReXcJV7p3LpWuJWD8h/6dwx8WuBg4/zKPXZst6WHD/xoEjYG8QNoDDOE+SylSks";
+ sout << "AwuI4F5meYJNpFAoCj7qQXJKKyzyeTz2Y0nISbP34Ue1H01zu4SBN7OMvhP/Kl7RSFpc6JkV2brz";
+ sout << "zksgJZ4vpuqdij2S4lQfJbtdrk8smc1VWsVtkRKZzRfbPmECuz/0aNHsWe7sFyqISk2Pe5kwuj3O";
+ sout << "3mvWDzXBK05J63FYRnpemGdnvIkKLddEmxNecMuotq2s7fIWy2xeu1Ge6/8ILJmtwm9Bq3afgFfw";
+ sout << "nRBQ7dj40h9kCgGUoAx0a2oEwTdvdgHp3eEjlyGMqdodEC5F7Cn+uSh89UsHUYbxAI/qTpYekaVh";
+ sout << "5MRydNwdIg+3XCHQi+QGEr41DECSXUAoL+mCG6W87WE70G1BsvA3ofdGtKUxTXskdWVtyNip/qXC";
+ sout << "f7Fez1i7Cd5U8rzkdCZD56P4KKx7H9N10aU5jIyfba6IKOG6WEqgf92SISTXCVuQSHqVBNHoGiG3";
+ sout << "r1xxH8KEzApRHJF+whOvt+pP8eDVfVIdLrzDH7YP5nzQvnD1eVv6iZw0izQu7ArxMoqgSXOZlKBW";
+ sout << "L3nkAohU98uR9oYPGck8HNiyKzuiI+DCeu7BOg7unI0WJsn0qE/F6EFEFYoqusBXRgDVwmySLyAo";
+ sout << "+0eB/sUawsnUh/w0lRHBso36uynLjUSMBCwppQUU84pJ8fqr5aUfdGfWzogc/q0pFxppiexM6xHd";
+ sout << "P8/lVcbgKEclaNJRkE401wDUWHDmtVAHiaZA/532FWyrucLOp6IRgM+eZAtao2/hQUtwOOcaAQNa";
+ sout << "DcCN488KJmPQntnz9zn7Ep2ZZvsrx6fg9ubEEjHHIiRYSUDClLUqqFAJZaGrGsXmrSDxF0AEfQgJ";
+ sout << "aZF7CJ0WBl4qyUBcIIeWr/+u7Xm9WDnD3Aowl3/RVxH4L9ffGDEMkCeY/rmNp2SDuWfti7vSrxge";
+ sout << "NSQEaqiPPMHrgQ0vuGU7ipACRYtW8MgxSwxMc8AaAbhPQu8L/ZEC+/pfxDBy9mtMKnF8Jj2scJtB";
+ sout << "GKhZ4s1s7F1fqA/m3Ts4rfA0HLY7bAWeXa2sNAdvkjoQHMLAiP/5BB9B4vNqVGLSrislMJ0sYfcr";
+ sout << "t/O0wfihW++wYxK5HJWn/g7U7+lu4OZHco87HB62wHcvy24LqwNKY1w033SQJzkLb6Nkqj4HU9cw";
+ sout << "E82PHpVUAXqR6FRqOSGX9bMMqWL/e66vl0G3hbhIwQs5toUgg3XVv13F1o2sTzjiQ+22wf3PEuD+";
+ sout << "aYIYKQiE4R8kl3/96og7QmVfWoPxZiVEGBzM9bTDHx88W1D/CkwPkH389oV1Xky/L9UHd8adnI6l";
+ sout << "kfuHNP/geDSReFNJ3vtqUo6RnWHAFaTj+x5GDJDtol2hC0KaDLJCETOQRXUOOW371wB3I6slEKjG";
+ sout << "gANfqlMyzsO/7bxUw8X71uDns683wpW+Kwr73bJwe0OeP4dgwgTdTLQApMZncaphcpuZCEn9iSpw";
+ sout << "GX3EO37EZR2RvLBMvlHhkZSAWr8TqOALJeFEuYiduuUmyM2xh7mLw73AqGA0Wm6rnc7oZwXhwaBa";
+ sout << "VhwUZyv5rGMNQl/p5bMO49+tQiaIP5UXxheMCUN8eD6m7qDyHsljVQ+Madsj5iaJw5rkNcO4oquG";
+ sout << "mkrQOI62kZGM7reBAZjYYjf33ttAZspzXJVFkNt8geb8e9/AFBdfB2nkJebf8aAAie13umwDR09c";
+ sout << "44fUO0jtn43FxMBMXRt8SN3zVf4ycLW0WBYliqRrfhbOlN4wfzHNnII0JBO4wuDMC0zXnurwcLA3";
+ sout << "SWng9i7SI/XH9YZLdEn1cEtkTNulFFsZJGbLci2EvHEnG6D/m041JrCBRP7oCGonxdi2zaKe02B2";
+ sout << "LHaMeX7Of28XebQgQ3ZQrpSxW+h5FqIVNNo9EYxnqDDKyB7TeQnuISmSOwrcARqs7EgNAQ3Wbepu";
+ sout << "Fciu2AFnzjsW37Di4Z+L8tVWDN4nZstII68QCzlx78pYFiWD7lBQvuMYXGPujFgNGXIW+uk8NM/H";
+ sout << "ZWKV4UopvDx32VBudzMGJn3XPfrFqeMDukDYZXe+8G/jo8poz/kPfQmBqMSoHZyoKZ6yPNEeo1pD";
+ sout << "JyR5/neJsSu8rCPX61ZMuInitxMKRTS8OD7RhcO5o0kNsFwXxe2AhJm2HbOCqSnGktciP5M3gvpL";
+ sout << "d25K7cTMNvZIpmwH8eQmOoicl8IRKpj6qzxZt4m8yZU5yFiYzkMfEPjUwCmJs7tCZelrNnaLoFpD";
+ sout << "LxraAAsxLtsTcIL9ajBequXTFW3SFL7jsG8WiYRSYmu60gWPgUcmpaK+dk6HFy8SFCsaxlTmCn4G";
+ sout << "cbIFfJU1O45770/tHsDxNOBu/vsdp5yZw97L/xl6jSM9gTMdFbTdYw4Va3nr3L99zk9WnSYVZtwl";
+ sout << "C5LtQR0xUAldgCgSUfM06Y85VismlOis/GxVfYqYSwN17u3Rv5xj8JDpTxV8N2CwRXxrq013NXoj";
+ sout << "eDztUxX0NWUEDZuVvZUIBv9IO+9UaBHLZ834PTzKolkznv0dfedf47H18EWGxhXcJs/bqIP7qlm2";
+ sout << "B3bpaGxX3BbRVzuGrWRccOO2K6K1wxDPBSh03z/qH2ddZsTPArbhqlvRiD7kmMDGJzX/Y2i0SuCw";
+ sout << "nwhTENF7U/QfBFD8hWqi6/5rHArt/YSfPhaJyBOoRhTWgGc5RPlvMKviM7fE41Yhyd0vD4BqylQh";
+ sout << "iOKpy350GOo7g72UBTjlUHq0uWTX9rMpghHkZrMt2bvNzUn9SCx31vC/H7dVjcuEjCeUH4xkVpWu";
+ sout << "g820oyLQSkreplnhgE8IyG9OjJ07t1A2YcKHq0sr3t+TMLp23O6D2b/ZAYcnBFVIu1T2LIuX7NAp";
+ sout << "tOmFBbO315b3PV8bQdywYBSlIcILFc9AGlttfB2ksqGF9Juhx7UOPczWeBgaTOhBFFcBoSELVoqJ";
+ sout << "YP1KMe2EreYgzjVdUEAlbUh5R7mOs/pPF6XjoP++qnu9YE82r567jLBo3fmMGQJqURjgAIxr12l1";
+ sout << "+O9rTfxhjumcmJo6iOeK6/wCPuiD2QosXIlYHi/nFlS3qJ8d5z+tU3R9mi6v6I70GRSB4TkFuHmy";
+ sout << "5AX3LA05BcrjPH5LO/ulytgvkKsNIgOA4lRgZ9Z+whgI/MxvgGEzmTEyM2nxUFfzxlM4Abi210+2";
+ sout << "WFu1QMDXI6MQ54KGwi17BQOckYEFa+0PSrxwf0ntQcgcGnpPNCSlSATUQZKCN6E67UTP1KRbak1K";
+ sout << "nMvD3D9t061X0fAWW1txiUca/iO3KLYtMqAKgW6DZXuUzJdgtk2aqUYTKoI7sVkyVHo03aBBr3fT";
+ sout << "optSJzBz8UIRxzRauuWo6o1mCyja2eM8fn7TMvSsZx4QkIF9XZA6UNdeUpbSzwa4XkdTqdzmoa7J";
+ sout << "V3z+mkCQ/AaaEctE6Oi63jOlzfPmAOobY9Vp9i+ojaeo77SXu/7cwoWgeASY6zOHdN66e/zedP/L";
+ sout << "SNYH+B7QfAvUgBLK7QuP26mtws0qeeGWPYCB3x5+pbeS/PHqIF578D3lasqUUXwa3Nz+OzyBGXBl";
+ sout << "AB587DXXvbqK5gsoPE+mh4CJ0qYcUGWOhDejTpuEG50OQiGORtaswXqr86d+3vWdsmoJq8YFEWTR";
+ sout << "Pd+TwEF9FO3qrN+R73YWEZ5nihD5eG2Q9ABWg+C9Us9KqtlPYQOeUjwrEBhAJZ1j3UT+OKHXuR6e";
+ sout << "6dFh0fmlpXewt/gZ2z2WMZT3okC5JyKIMROWN7m+W+mBMmYg19gvib1KdBeBLO4sxdB/mElqdYYt";
+ sout << "k+mwhcuKUWYmaSmnzXdU8/Lxyvfr+hpPIPJHYLXftm1DpNHQjLY2bX+UROQ67weTACWOtfz0VJGv";
+ sout << "auzt3Kd5UG7m9nLXgk8ZibGOie2MWMY8C2reyrfU5sqOnLO9VNwazWvKb34cFEPKrGU1QqVELim9";
+ sout << "kq/0gFdbsyiotlds/Va5AkeVAVC81yYhYRnQm2W23NY62dMMgorLZoxuJV3RPVUoAa5RKRPq346V";
+ sout << "HnC7z8pqOsVfoslTCivd2CEzNQK/cYT/LJ7n2Kw3bhT5niBsVNyAjZaK4oI5fqzMoqC75ipQz1IP";
+ sout << "JwLqRuBTBjipG1Boum2TahnMriKcAhV1vAiHcB1GkYf9Bwnne8KiWpVdN54drWw7otSWxpsD7oyi";
+ sout << "nPfN7o9CrAcERG94IxkZJD3uY748ly7UrScLvIS0M/zcC3f0FuBHb3k1LKOa53B4Q/wo9FfffpYA";
+ sout << "RoixLpFQRoFkC4l2wfMenZEgaYkYyfWoWEfGD9ucOiu8h5IrffBDzs8Ck8/y32oHvL/FlouHrmpz";
+ sout << "5SKBIez1dkvdFTI/378ArjAwoJ0i2QsGyS/uO7FqgTAgd/9iIqeGonMnJFRPmH/VdT5mZwwj1BhY";
+ sout << "2/L0tu2kF3uwGX4c7fSDN8AkPK+DexcvcRBwG6cdmFs1871Ltv0A+i2UTgdk6/Q7MGnhRxnDUp56";
+ sout << "7cTxaOSfGTgKcJJ/US0PZjyWLN6tinlrYxkDq7uODDifVo8kEIOzYn/Zo7YbrC2FEXSqw7khbirb";
+ sout << "5ErpT55vaggbD/HtahzrQeYIc6lF5oVtW5TuAAdBBwnPwtYrkAVYQ2KZeGh68JKCEbKfNoAeKr3Y";
+ sout << "hhU51cHOBC8p6hm5T5p4JZppu15YmKxbLZIscvWCfriYVwPycUde10Nu+AixLGcaJQCr5LdWm06+";
+ sout << "ZJPovwV5yErl40B+WEa4YTBm9HJqYXOI3JRAEkwvioR9TotiOOqCaoo8YtIHRU4y3YOUcAdqvtIW";
+ sout << "Sj9eLPFLgTQ33kOtsZKDDzBJ/eVl6eSv559u23AuaeuI0ynarOjvQPw/543W/lCKibBGhuaKbYJy";
+ sout << "igAy49KqZ2IBHGJEOxd/7e4pnBgW2Yg7T7oeruGrIlJk3BFKQxEOO9Usc7XYSiXMVXwzGDA4huZt";
+ sout << "2zq/k193xAGK86lvndpmzpJ/eyVqxqGSN7yyr9o9Mq8mfVJ6CYGXgb+CybtXX1r5FfNTT7ikJreQ";
+ sout << "koaSxOGLvHfys0h09POLbTZ4jXBVrcuLuYoN2Dg6vpnMQSEoZzY85NOGxyCPUHXf9KFRfUe+r5zp";
+ sout << "O4NnC66UUzrBan3eTYs5X7DEksg+bPx4KGMQ/byJm4Jr8Jzeug6S1qV1diRpkgR60KCS8Q2zCmqa";
+ sout << "Y42riGdaBXcaZ3MM1onSKNWiIWa62QGu/GuwY64JUXkSTZViCgkPbieNk4J9WSjwKWPHwPq97qBy";
+ sout << "UcJb7MNuA5cxGXOUpUgo1I6ewBWZJvTIU4o/b+hBbBntfgacsZDooFmU2xUYycL0/ornfsEuJczW";
+ sout << "1dI+thnc10hcf/WWhUpOuMLQK3f34AUL7tUWtsuYFaRN+1dabnldREIXMFnZs6G+gmbaX0c4TDLh";
+ sout << "i31s8ch2irdU8aSTT/1J7EkMvyDZt7NLHeTRgRRrVtGg8nYbBrB/MxwqGTjtD8X7+MYzloLLiUrc";
+ sout << "7ST+dShb/DY4JS5RA8I0RBJbqOcMYzdv61UaxfYOnvZHtbCGlj5MmIMaPCDe4ZTSPsk1+NhUZSKr";
+ sout << "NcbSth2EUMRZsJqDLN5Xh8LFoerrFLNfbb6r/Zo1I4T7NcVaSKim7u03ti6xCQj4Ds4yqDLrql0P";
+ sout << "8IP3w1WmY07b+YRN9eqS7gFN4ReqCPVlk1qnZOzzPjH8pUPqgb+0cjuvQyJslbT98FrPljGtXbJ5";
+ sout << "si6Cp+zT+aIEy9HTtQ8Wq3ojrVLtjMl/aRNPb94A0ncTZYuoZCnOC75i2lLru7HOwHCdA7Acyt4M";
+ sout << "a7xgqqPiV13DaPpOQYp5rwKHNelo56FyhrmhBC2qQznvwqLJPRcnMEvfsg3Prs26ncYOOwXJ8DK0";
+ sout << "GzXkJVznLs71ggUJY00mOvTS5+5sPWwxyjKhdckQfSil9orOJJWeDExnCVZYuIWBij5VXQiACLyN";
+ sout << "WBKo52tef5NJwqNbjInxKmFChkX8N6en0uHyEAPxNLrYrn5I8+HibsvccOOIwb9yeT3TV/DB4SiF";
+ sout << "rKq/nLdDdQjbReZfZUHhjKz7i/QB7eHinoBVgLcmAkN5lDJVec1Oq9GNwqKTpM9hzHl9KZueqC0h";
+ sout << "EJILZgoJDs6Vy9NgjK+FYD6O/2Uv9CGfnCL47ziWfyWgLX2UPFX8DTrnWG71HfNjdlWO0oAmwJxh";
+ sout << "0zoURT4B03KBdm54pk6TNeoThxauGpNkK+vU6qcILlI64ZFxaO42b5rjQXtnmYEGL9d5MroM39vC";
+ sout << "hykPHjc2j2qFIRUQ2Eg4mtSR++AAriXGpDu3WuIygTiKMNy4Jbt8AnInqnHe5IeWgzE2eAomR8Yp";
+ sout << "ZkI4LCBOIPRQbFZ+DnX9Ntxqx/6rJmXE9njSqgbepHd4e38SMP9iMBR6u5fX9joI6rcRikf9W5nu";
+ sout << "G/TiDJNhEwvSOtgdEu3pv/urCx7CFwbkhSVz3r/HbQwup/g5FDuTxteiAXYn9TKCWXomd4oKtip5";
+ sout << "aZw1aIIYHKR4zNPsNPw6uJUpwKU6aaJjAjXJtaB3Aq/fcvuHdCYL0AkL5AAItGnCq3TOli3VAHg0";
+ sout << "zLm6OpVaCUravHHyYSuwXHB58w/zjz88c3tlOLRJO2CoFIUzvesT7FkKN8+y9itHmIdvgZCCWbm8";
+ sout << "uoYwp4U7dy6bBVvcxjca4A9f5BcsnZ0u3h8JRxdhMpwk3H1kt6PwEFTrk2Gb4IemrBh4GugYC1NU";
+ sout << "ZGgtiPD9Lk9pXqPb9LHcY4nWCEioiMUBd9IJlcWa5NOnZe+xeEHgVpy0vYrtqVHsoRAiXzRTxgaa";
+ sout << "2RlSne0P1g0NgU4YCnRf12ZIS+LvPMbYe3mXfDpn099AvuQiUngIFGKm//F59pxxMjZg/ADElluv";
+ sout << "0eOBWtYiaYVDL7D0hiffIcBcKQotLJ5p4q7t5v+7oowQ7cMyM4hvL5IjS3BqCpKNwWg3PA9jnhun";
+ sout << "sAVHJa9lLMk3cdXix+BKNhfq3IfqpzXAMdlk7GeKbEl3F1/+q4sxVq7sd9X9XLNR2lhJVy93soM8";
+ sout << "HfWHR7qaHnA/7XlDMDORDxuT7H3IKxDa+TkORiI9ldqwCEzC0ZGDBU3rBBxNK1jzXH7vzQ1vX7x7";
+ sout << "eqYl74NRYH8j1jDxWeQx6bQWSy0V6N+c8uC9N9HPVuH/18KArH+sEDxGRW+Hxi1Wkls6+QyBiweA";
+ sout << "tlHueW6h53s3Hc79AEK7gM+40nwvjCOG9vojSoI7bc2ndc+gdqRxYdUz8mu79bx8CI9Y/WlGO/Vk";
+ sout << "opsB2ETODt3OdS5xMTUZfSO732Hj3nQUTmVVheFbxDpVPUXNx6AAp63iRR1oYv8b4w6rXrjpj2ru";
+ sout << "8U+EXxv2W29LDJFC/JRBwj8JyTR9lDhARPqehW6EZmUYERihI4NZbM9mhgszbC+XDiM5rL80FVz+";
+ sout << "4eIA3mJrPNgmevZC2Y8osV71pnmt54foH2yERar9tqSS+q0q7PWX3+5jIrM/IDT/TBrHQ+Boo0y3";
+ sout << "bz8JHPnwCKXv34Kjg4NxbDjFT0G8hL1+LsNle+d+PyYPAKSLOjtzDicknXz3M6DgjwMrzD3zLf0O";
+ sout << "jUlwIR13B4f1mwH/cOatF5LfXQRCmUxJt/16/VtPsB9FuC0sziuda+4/CGAIjJ5GB0gqzwN7DSOt";
+ sout << "7cNdIO7w0CTNOOfVq9C1zPi4fndOnX9F3M+sPSzv7YjeUbkchXzJhip96etsRY855grSR//reW8P";
+ sout << "XNodO/FvibrfkHKpG7fZOU+CRLqnikcNgIGBDYumpgdWA1ynvAOFPgAk5tqtLZKmrIe3ceptlG6X";
+ sout << "E3UDGPMNVgExa9GFR1e5bIq0GZCNeIyGMq8cGLny0uRC/u9nPu0NoTMxFb9rr2iEp1DJB/zyiped";
+ sout << "EoTpBPDptrXVQ9DOLvpZnplsmuuCMELZ7eP10CaDzNXodNNSFr6bk5oDTHJG3foIB/VHzdH7WDDG";
+ sout << "DKMcvwwXAB+gAe0Uz3C+W3DlvI2xAHxpjx4hPvATyJwVgYLus9pHzKzZCJ/gLYMgcgyArdTPI7JT";
+ sout << "ExgJcLp8QTosu4zuYCJ6GgPmtYlVtUdLWiPz6lHCW77bkkFmz+c6u9WF3yzzY1nyhecgnt/7jbyA";
+ sout << "iSeDme7x/u9K3n3CTHc7+ZXHLT6HSdxKlwZ6811c5Z12X8aP6vvL/ysGTVwa8lRngJOOiGQuX6GT";
+ sout << "PMohKZIhYGudZundQNXPOCavN+dpdT5m3AVMN9IrU/ht8o9eH+Hmhs0gw/iEwos7GgEJDl7IUcAg";
+ sout << "TZb8blBMrDfdTyAKjYcuj6iA8aDYes9sB/W8R6AtNRG4S9vQcJuCD9JBKjxwd/5jG4hrpibV5bMD";
+ sout << "F7dW4/5kQsc3AEdYIeGITaDjXorQ8UZ4zF9Zy81kTw6RAoTKdMYtDgnEffkDSRWGUqySn1CkTs/p";
+ sout << "cdY3SRk1HMd98JwlVLpED+mWbcLwRuROBimfERC048b/vfUXzGSCgdYZ9AGQhHPzu8y/nr7Lp74h";
+ sout << "I0ghlojt4zEkgBgK1JRMXD/dxwqIu+PIOo6s+8vwOMCLytvqCcTR8hqs63JgVGJ0c4/O8R1nbwFg";
+ sout << "iQiCrQ2EjBbGaBMq8vhYNA1r9QzZwHejoHm2Z4a+JXoef2al/GqOWnuOUHBotsp70MwvCS8Q/EtU";
+ sout << "TFGRttRKcrm7OhNmeiFLAr4P6C6O7gJxMBu78s0bIzpSIDTZFCZRYU7NWdSysmRq1ngerD9inrKg";
+ sout << "zt/qQLnry4PmY8NjPwt7QYcQziZLN2ruCtbvk2DrPVSx24HDHGvB+0g8jLNLQDfS552/Xs2G4zc1";
+ sout << "wwPpayeQy7HFGT7Gtf3oCI4jb35g8MTCXoOMgM1Dk+EYJIyDT2PBae1AcDcDVj2JKOI1UbvanFI2";
+ sout << "sFpsLkIgj8SVUI5dX2PEEl7AjmoFJqyeVpqmBgNEfNwPEqfNNLUp6mic3WusrWO/jQwFtUIdEWAc";
+ sout << "tjyIqyXeF1e1zO5po9/kJOrRJFAFxMFI8PVZNSQJ6KIQDhjPJHi/z5cCaf+lxqSo6OUt0D7z9o5A";
+ sout << "hfA5GwHX6oy8tGqommBrb6nF1AWVInk19mynB6E17TQRo3T5W2OFG+BaMRO1xlg1znAurKht21fr";
+ sout << "6VZscHHtctXifsg4dequ2SkchmjJ5yWpxqRqckES9B3nVTSiqn71/T2O/poDieJhqADWNw3gj96c";
+ sout << "GI4vlUR4Zo1drdvtgwHFFR+RKndNuzv2BIYvB4nm5SBqx0nuKcvbbxkEEhIRVOb/GiMJ8ZwjeVxy";
+ sout << "b07lDWMdBxLyxytbDoNZbefVc8N0YFrsFvrc16W8y4XKS2NG7Kfa4u977Rr6Yd6+u2+DIYwfsrzM";
+ sout << "DWs67DC1+vHawIhIQ5NyB/R6hx/Tym42s7IK7l9w80SaBvpN2A6vaDlNlOD7ANeZsmaAG8L6zFsb";
+ sout << "FLBPRNFxm+HxHYjpG3+dBySqcIyK0tSFfykLVffPMFCtgzTwDM3+5VJEghCyVlGEhDl5tv97jwVI";
+ sout << "aTYM84hH/JqKcDUQwmilCXqbb8bh/kTEWKzsuuyaLxr3D6+5MhVMakjG3RkIKfjddRukZC341efa";
+ sout << "fFJVWanSIf3lSrhFjsiH5k63EjbZmAmuMpfiLZ/Eh+zjBbh0dE1spYJ62S+PwkIXMTjCxmoJW46O";
+ sout << "5Bdl71SqZW7lnQUubNcgRlxAZGKY3aQlIV2qHptzubV04SMqdcyVYPrPmvZSM5U40eDoJB9UYB/D";
+ sout << "JaxlWFMYrW1ntJTv+LrtYepKdD3uC9SLKi5VYrWjx9WzRMxxkDcNt85Ak2cfpA3MQXI+uT7lQVoG";
+ sout << "vDM472l4Ruls317G1X95HA3lcJNseSQnFsAeE9GhE6XXmxDwIDqUQ6Mk+F71rvDufNIj+GaUTISI";
+ sout << "+lwC4tqse9dg+7wcqSTqe1cx0K7tZo3ZwR+Y3JEe+6HyURImluo6BRV0n6URS2MLpgN4geV0m+6z";
+ sout << "ZIML0oUAKyJVdyj4XWEq5Q/pPDvz7jDfKyqcHTCwBCuBQEnhsxi7Pr3yEac/dwYDKRSkX+7Q9mdo";
+ sout << "lAjgzkEnxlR2JUJxtrsEuhTiWql6HjmrkT6ohOeR/k24Y5cEiKx3QM7gBcB57IoxxMVaDJut+Q3x";
+ sout << "o4HQ7J/IRtkKCPp0LLSK4Ue+nSgJY5LWtD2a4KzmVnahmXeYXCuAsKm3+7dGFISapet16Grci2+j";
+ sout << "ETdZGgs0DEdZWncLxrCnhnvXHrfsSsVyqocOtTs1KiwUgGgcEhlSBZyH6bpF7IYhQhPzML/SIwy9";
+ sout << "ByDnFZIvESXwQObXa1iYfR37SyV0Yw/75bcWAnx/+jbodjLZF+RMskqs/i+PnEasmlC5wrjYo55o";
+ sout << "E0tqAakxpTNP8+TrRgNMLFP4tP25lfDLUka+pUXoeLC7gWQ0MTA8LDx5GyMlblaIFGHx/eMZv05r";
+ sout << "he2nLJgKs5ZtTMh+8pMNkEBWXQDKyJn0oVLcRQL80bPfpUfsnTW+jrEkmH8azM7CN028QyPEhBPx";
+ sout << "lvDsEjVbElUqBH+eo3o0CYLpnABUQ28KvsJQhHAsk2WVScBv/hm/T4c2zABqaDOl2LkRCK9ZeF2W";
+ sout << "kqLNjVH/iZBVQcjo49++lINm4Nupi0GPnWpsuMnNoOdZzK7Oygqbxs9Zz5VqdjRCpxQGQgOhAD2x";
+ sout << "aifYz6RSk7i0RB33QkjE3u+MMEQLL51zl2EOREXed6AJT0U9D0+hSLH39wqMbdEV9bIFcJ7XprJP";
+ sout << "ueO/zZOmCHIBfz+47Kec5BAWkufEOfKxOxwV/Wvh9+VA2RcbA0exvVj3Fy/hPmRaw6E+SBZTJoi0";
+ sout << "aQltfhFmfLQtT4SRl2FjDwlpNJHjmbmVOALldNBRhxSz4oWfurPqAB1A4MazDorJI74vqElH1SMQ";
+ sout << "7RZI0hDhS100VqCdgnYhtMa/We1kNuLcLi7cSq28JPUE0tkIE8SVLldq2Ekn90XBP3mY8LJe0WMR";
+ sout << "J/LzfvopGKYrqday/eYuiXl+lihXp0dpph9ZUpRzmAYWtns/djnzFSGS3bF7VOOaFuGxwYHi/1p1";
+ sout << "SaiBJV2a4U1vr0mh2OU3wMuqd/t5nfl+Sv75zG3YOAvcoiUH76xxHvRnn+D97TegC72k4mUgBCPs";
+ sout << "NZJUhjyKF5S3rerk5uhxFmlAmy+XMo9VJJaYs6OxiIeSQa3iAnbnWDzY3m1chupbDYK2fs8tBg3c";
+ sout << "ta7LM5SgWZLMN5Lj1RtEbFBpAylXWrb3JXGuaK/C2biFNTSFU10Xoit4NcqZuYTXu6OhNahZqayp";
+ sout << "IYXTMXUy1KPyHHMFWZ9gXXB/igOg80NZ2O+EYHHfRiqSOFJ7xhnY+H4X0kh9v6N7lxlv1WOudRnJ";
+ sout << "lWg4FktfX+JVDflDvJ0IqKuM77O5fgtBpRL3tpS2dn4U227ZiXRybiv9zCnbpK7cfq7/t69AVgUG";
+ sout << "qjyTGZ+YG+PDjVBWC0KGsJWIBIHwCoTgIZO5kgU/831jcG+LyuHWf1Q9bIEkOXy6UzexKYCJgIaf";
+ sout << "EH8IsTAVe+uUvVSBpr/Eis7LJkyx6KOAbDR3eXjXjhXJTSTK2efctB2Bb3GraweN/LUZ7suO16Ng";
+ sout << "odHLPnS7jsgag4B7sLNCE3NeGfd4VIE6bYg/9+Ci8k26WMJzOHIO3H+Kc7cTPCdohAQS93epRcLz";
+ sout << "bz957op4v9NBlgMcIKW3Fp1opHX/+vApvBecnNDF2oJ0bUq4pFuM8Ag+2PdSDlJfcdnq6S0lNA8w";
+ sout << "H/65WlKeVTHRt0nm/p3jJ8JNUlIZa8iZyGa/vpCaLqymASIcIfYEdfi4ugCOvbKgBOT//4cl8kji";
+ sout << "3BpcImX1IRx1UzgcIcK4lqO8WoSpGqhClHHi+/ezdj06JLKP27zKeHZwhTMAyYifDnCOJG2XtuY6";
+ sout << "gg4LTyI7BcZ5jAogPeibS+GdwlEfN+hOk1idevzrgEdzJTIR3i6p7ugPS4unLC2+waQfdJMs4Vjd";
+ sout << "aCmDdalKnQlpHL0Smr7Ezu0P1jPQcyO2hGupDOHl/TslPpoPcA1N6OSTraByTWL4yo9aKG1QDIOX";
+ sout << "NLGvos5UGUwmPxJMTeBGkxT7hyKmNzMmhfKB/yfqDaBaV6fosCm7S1FVsdnU4PvcFxa61Jrtok6n";
+ sout << "/lwuY3BM5VCG9/kZQtVUtImeRB4hYy8fETZI/WH+WSGhZZaum+Z7r4Ngg8/DC21U33wyR/WElnVE";
+ sout << "+Ny+yHoK9xkiLpLlx4ns79XnKV+S511udI6XniRyg36kCNdRWTfN81HfbS6h/KXzl2ByPVo0Q9Op";
+ sout << "lTF2Ox2WJ0x0RMrE1SyS1KMyL1ff+ToYLKarrgalvFUvSMI7cxPlc6phczmtYojlm9R7JdJDpuGB";
+ sout << "k9qA/UEnH0v+wl1+5rt+RZ4dHtD4FOXvnqW3GdN7XvISIFJLhq+UMhBPiI4M39ZRxGpuROiF3Dzd";
+ sout << "gD19YX9c930yj3JEhkbYzjzwa9CWLcnXbSYe9sayBJmCjpUxYmfrWo+mH66POo9ZxJeeEOPiVS77";
+ sout << "HXv52am6J9m3gm2Buruibflo5CGo6Ngw9slSm2rcP+pGEAyKjQoLrU6fb0TxqUnRB8q4PVvbBRFj";
+ sout << "c1y5MU03KShFhFfcM7iND5az9RcBJwyNZtCZr7OxlUDVDJP5IJubY/5dahCfiX3EfMXT/dR6R8Yq";
+ sout << "jnUNqjVGnIVm456KF1xlEZBDwCL5LNUhGCV+kp90STd77lypcQPP1q8iFDo1HhHnJDj8is+5eMY6";
+ sout << "5bfMP1EP13tCqPnHgD4hK7Ega8bkvoEKZInxohWoaS6F6o3lslr267EKKvjHDmnA9uGWzxss7nuj";
+ sout << "3PlaWuYpU9NfJp6Eq0xYNYcf4qLTN3Nhd/GRpptxzdMCuu8x2CYKBkHKMy5iJELoJ8ntEdLtVdd+";
+ sout << "46L46j8oPIUyswAbmodfp82VIodnJ8xvJUyJWRCXpCa2esfid02xvel9h8w1V8Uc7BKu9HMsFCDn";
+ sout << "+FbebdVIahMe67eUI3i9+mSZ3BNq0qbkn/dDabGwixw9lsCU5uyHZrzafG6lIA/MiepNzIRLO2Wp";
+ sout << "iwhMc4f4l8ZRb89HhL4bjl0Wo44VEBP+Sqf+jWMPS8qo5st+MZbtu89RObDH4fOr+1v5dvGkl5/d";
+ sout << "YROeKTCWyAWWSSoinCHoR/IcBi8J31l34Pk3nNrZ7BAyMn28ZY7mue6AF6hpieJ+o/0RRv5m6VM1";
+ sout << "1OpAFyajq7E9/BjrAfRSpCtoicoIx+APfjrLbCkEehMCBZoOM9USTMi92Id+Fm2iI+/AMStEU6tJ";
+ sout << "zumgP60vNm3VwZw6ep1m1DSb+2lduAxtuDtaspztXYx3QhksLmbzeKD91/v2Mu/JveEfy35tf1kY";
+ sout << "fcFtGY/puCXnAIxEeKY26aiZroOWt6HLIafNvD4wg//A/9iTszmQauiTWQbBLxcK/KreAE2TOpJ4";
+ sout << "klwh8NfDhULKVKoJ/AgNS4I/Y/LFZSX1VerAImW/WQmpkqvjnOUrpLNkuV2NjA2tKP1G45dInmG/";
+ sout << "nAnaPoGs/bAdQLIJayWx42xsKM1pil6q86WvoKcEH1WrRRXsRJgw0dU+Z1QIFlC+xKqP7xS0LZEs";
+ sout << "fjyeED07SGKBhUj1kbSp9E3PMr32yDHj23zmnFyBcwihktaRV8fEquoE0CacrSoxfdxzdWIDy9Ya";
+ sout << "6tVBbCcxWoz4CSGzBKSrEXVjedHR6H3etlrGwzPWYwbiFQQu3oSNml6ORLh2fu87qIjz8VrO2804";
+ sout << "dAqaUcJBtHOOB5KnNMSyZWA1+62WX3P/yJN8Be3AcHY7xf9bP1HUxyq5cWcAbtx9yJV1dhXyvPz6";
+ sout << "D4k1sntDiWgivLkY7l3OBVRs/QKGOOTj1uLei/v6X1vR54lH5wsRwq6niPYsyEcCnWk2zzTKOcK/";
+ sout << "OBhLQHtj5Wxf+NQ/S1zZnIBS0cww5BqZD0aATVsHfifZLEw2eRM+vap33o0ozqXIdbCzzsotLwlx";
+ sout << "CfFT2a/MUDatTSTuGjOPpjbmVtK7qWkWmHAF0j/GQFs73K/8PGmAWomFb2WWB8frlHkkJoEPyHVR";
+ sout << "N6hlsAEd8ylb6SwVawZglxdpqfhQpn71NaRU/ZJnk7fnSVmKwcGVeh1sppPYKEu/Q7i4YrAGtLDp";
+ sout << "Osipwqh5gL/skuxQwsBI/VNqoxIELFbJKhGrVqr2kHWB3yE9cB5puaP/MPPvIwbPSNRZVVbvGV+m";
+ sout << "Mmi4LDSKPlxDmjl3wWnzdldnNUb281O5dA7lMuPHgibIFjl8EeVHBFLocpnFbSLRiCZx+xh8sNRX";
+ sout << "ti+5Zl/pGvlloP2k7yVNRFpB3Av30jzQePY/DOy5DoF2k4eY0qvSR7HtV2IG+9wpBAUCbIaD/2t3";
+ sout << "FBcQT2cWEx0TPZbBqPLAxruNGcv1z23BdWUeQpeEV6ij78xVILQegtNiJlCgMTCmIRr1meCfuVJ5";
+ sout << "vbvNLxRiL4JSZbjKgfonK98os72YhGB0Y2oL7ZpcthGFKCGAjo76XrMB/hoeXlx6UKYlLknZImWO";
+ sout << "NQj5SAhZElv+LKdSZ1oc9U+0Qm8K4G9+8OnQ4wS61R0MpZKXnfFAMp99XHqN0tD6W5Br22/IOyj9";
+ sout << "x2QH50KAfIYZFrJaLt0MMylpVhHPQ1RBembk0wDcwDt2phnU5nm3m22r81uUHp5Sc0Or/vrzpMEc";
+ sout << "9vqhEFhFG74o+EGTigUiyzCGUHJu9ITqb55xDmPm829KOqkOEV3L4XX/jLwRFVkbAHaijjmg17xN";
+ sout << "yJZkFlYG/RntbFd+p+10JuhI9ahBvvCjFnkC+8/F64Umx4jlWJRPIADPr3yH6tt+nVkC2Cbx+U4y";
+ sout << "bgINetMWF1b3syNyJh4qJy/KXX5UckzU9A1Xkz2MvXWiB8Jp0f+Q/bZ0inTmecpyBGWCsD0xRYPv";
+ sout << "Ktm2zqD2HL3Qtfpyi0khXGmUDKwvuFnjX5V57ustxKCq2h/ylqTDCMpqS/QQ122Tr2DuHHVjGwGG";
+ sout << "PajaD64n0qEj+zaqyUgpDZatEVMgFxpLpbJ+sTyBaYKYPYqGxoo+XycDYIWoc7K+7d2NT76qQcI/";
+ sout << "LHo0kGXRTNiciphMn4EX3PkH7zUqR/yCWBRsP4wtPax6aM2m6IpY3bDbOfwMzXDTZDLAfi3is8Lb";
+ sout << "H5AfK2DDG45w4olFJuLKO27zafuz9GIAgoTsUwpJIJeR4xdtlP9EtMjG6SZdp22+MBSh6w4CXe82";
+ sout << "iIquldB5HzaHvBr3UP5LGQR3W/Wky4udFj2+XCpKsDUll+4MZCeGz1VM5ZYjDKfRDzSaLjJspoVH";
+ sout << "TT05yuagEN3+w/ROxo419ZEnvR+X4QLyp0JofhFpSsr+J9fQwaz3IGpPMs7wA+Q9CfEw3FkrFfeH";
+ sout << "022qfnq3naUsXyh8xAeI4r+MP6WGlscgFkWPZhM/vDq2IcIbCwZ10Ivx8r9bwi6bAfPE5cru0oRf";
+ sout << "YYs7kU0l9z3yzeSvjVjkdZbmgRNPpED67KlUQyRibkWe34KGglSuSClFu6PWrfFxb/9ch0KTFVQS";
+ sout << "fo3NDryMrF6oeLHe7osiyJ1ismBimKN3idmZ7yUknYbpR7/Z2K0LbAIttidcQ8LpgUzy/6jROtHE";
+ sout << "MRLxKl4ba6tx6BP8WpXWVzx6xYox42qMX24SkhhNVR/9ETntqyFEeDXXlS88Nm14AZARVAp8wJ3n";
+ sout << "WaA9OydaCeVzFVZ614BRC41g8VoHz9iZxHF/7fL5gz+YTJH7ccf18hxv4U/y0ROiZAQflTW6+lYI";
+ sout << "kIO8t9A9eQ7Djh7cOHCyA7E391UriFUZH8kBzKjnMeF27uYx5U9acC5YUVtDk7O7Xq3wqeGaxS1b";
+ sout << "oTVKitvXCbdmvIiCK8N8RZeVlteuYhJ20u7LTHsO87c1Tv83pu9OiYDMwEpAFLwfHwi5YfZecAIA";
+ sout << "bkkveDusgnh80pxc/24zp7W42Hm6hfq852ZLLWD2bHm3YZdey2WIbGsjvQS+io4xoEKGLHUOi7Vs";
+ sout << "/Te80Yujo61Y3BjY8w58xNplBAXvK4Fn2EenYcDz839o62+xeUuGN4wSUpJF94aU9//hEF93J9Zf";
+ sout << "CxFPlooKe1W7nU3sl2ziEh0PeM4RImmQ9zVNkDEx8ahdmvWWSWlmlv1K754XAdwp68cdfKUIY8kk";
+ sout << "M666TRZrSCDUvDufPL4y1LTpQUmIhtNLkrYZg0WEXx5QRF00x90VrsIFcXA/3Sjc2MPi0iMck6vM";
+ sout << "kh4kYbIqlvgQSwTqc6XmXEM/W3HRuVCH8kQKIXjN5my0C3RbbYIr4vAPaQL8PCWaIjbLSHDcutk1";
+ sout << "LBjyPnJQzn2/vvm+3oKN2JXPxeKvTbaoYu7EvWtXxkFJlrKY+o+FQfxmtN8qxniGWVLuLX6k8keB";
+ sout << "wCLZV6wF1XG5FsqBoVh8qvAV/Tykk0VO5FJnoJ0sVwOveUgbCFKsReM24lZlgzHsHFaTN4qSWHyi";
+ sout << "BGg3nr0dETiOyEfLP/ckF4GhGMMQZmT9jMFyUEgDfszoDgF+jxA1buvCKTRm4iVgFT16pi4wTG8I";
+ sout << "fCOM+SU2UltyA9EHdOW3rm8P8w4z0D0/IzH9Vd/O8Y2kB4TUOW4yHdB95MBCJbvEGYXR/A1eTsMW";
+ sout << "J96ZVGCmlcOzWqPsX61LShr57TLbE7+hJzxUMEW+ri+goqnSehcpGurw0+d4K3+5jFHxUpNaT5n+";
+ sout << "t0p0S42lOvrgigSZrQiJozBCCpm4sCCM91bzGge0VpyuIGTVkzbzXfjzF5J/wliLSVyzqSNq+HqL";
+ sout << "06BqMB0leDtLT/msAUV6SRKr7/n6qMgkrIfrAri4d13kZTpexQTRTFXxcoIgwUelzsxhdwZQ4yMX";
+ sout << "dAeL/oKOQdjCgnsVFD5BD7clhTEOA8gVVxASHinziac7ZkdYaHwESDZBxJhv4j1nkKNLWZLifph7";
+ sout << "NdJBK51yqlBvkZKROBbTiQi7UCe/YGXusDPCBsu55iieo/o65HkF7olpANKxvHxBrQuCTxYnL1tl";
+ sout << "pMjxmZc1BA2T7vyd4AzXRJ6tMUdqUqnjPVFY7OIKMgUn7qRieMqt95vzJqh8+jdApnY+xwnIKosv";
+ sout << "ox55mijPLs9oUBzAJPpD3nDLR9pnTVIkY2RmVRQFUN/kuJHYbNtc0PRIAv6iDiZhe+jeCkTx/dXC";
+ sout << "sSVwD5hp/v0TvaPa0XSPr1BbqlvK6KjtdsVJsUOjHFskNm/8qlIGKp9F5QCtLOBhp1eoy2AZlNlN";
+ sout << "+eYQRzwMSsJNxq44rixF97d7qeiOkC/Uu3wNk7aL11AR5iS7gau10LHLs3YhMbUcb+4kf2j9NpWG";
+ sout << "wqMklOYYJag/XNyoQs8g44qAha1rVyeq4eXodi0JegvjkXWEB4Mq8jBuHXbYjYiRiHoL68/9mry5";
+ sout << "nlN2Duwp7g5yl982CZLZc0k7uSjKaDkWyynH60MwLnmVj2sA";
+
+ // Put the data into the istream sin
+ sin.str(sout.str());
+ sout.str("");
+
+ // Decode the base64 text into its compressed binary form
+ base64_coder.decode(sin,sout);
+ sin.clear();
+ sin.str(sout.str());
+ sout.str("");
+
+ // Decompress the data into its original form
+ compressor.decompress(sin,sout);
+
+ // Return the decoded and decompressed data
+ return sout.str();
+ }
+
+}
+
+#endif // DLIB_FRONTAL_FACE_DETECTOr_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/frontal_face_detector_abstract.h b/ml/dlib/dlib/image_processing/frontal_face_detector_abstract.h
new file mode 100644
index 000000000..20815cd0e
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/frontal_face_detector_abstract.h
@@ -0,0 +1,25 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_FRONTAL_FACE_DETECTOr_ABSTRACT_Hh_
+#ifdef DLIB_FRONTAL_FACE_DETECTOr_ABSTRACT_Hh_
+
+#include "object_detector_abstract.h"
+#include "scan_fhog_pyramid_abstract.h"
+#include "../image_transforms/image_pyramid_abstract.h"
+
+namespace dlib
+{
+ typedef object_detector<scan_fhog_pyramid<pyramid_down<6> > > frontal_face_detector;
+
+ frontal_face_detector get_frontal_face_detector(
+ );
+ /*!
+ ensures
+ - returns an object_detector that is configured to find human faces that are
+ looking more or less towards the camera.
+ !*/
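+
+ // A brief usage sketch (illustrative, not part of the original header). It
+ // assumes "img" is an image object, e.g. a dlib::array2d<unsigned char>
+ // already filled via load_image():
+ //
+ // frontal_face_detector detector = get_frontal_face_detector();
+ // std::vector<rectangle> faces = detector(img);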
+
+}
+
+#endif // DLIB_FRONTAL_FACE_DETECTOr_ABSTRACT_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/full_object_detection.h b/ml/dlib/dlib/image_processing/full_object_detection.h
new file mode 100644
index 000000000..1dfc99b2d
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/full_object_detection.h
@@ -0,0 +1,191 @@
+// Copyright (C) 2012 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_FULL_OBJECT_DeTECTION_Hh_
+#define DLIB_FULL_OBJECT_DeTECTION_Hh_
+
+#include "../geometry.h"
+#include "full_object_detection_abstract.h"
+#include <vector>
+#include "../serialize.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ const static point OBJECT_PART_NOT_PRESENT(0x7FFFFFFF, 0x7FFFFFFF);
+
+// ----------------------------------------------------------------------------------------
+
+ class full_object_detection
+ {
+ public:
+ full_object_detection(
+ const rectangle& rect_,
+ const std::vector<point>& parts_
+ ) : rect(rect_), parts(parts_) {}
+
+ full_object_detection(){}
+
+ explicit full_object_detection(
+ const rectangle& rect_
+ ) : rect(rect_) {}
+
+ const rectangle& get_rect() const { return rect; }
+ rectangle& get_rect() { return rect; }
+ unsigned long num_parts() const { return parts.size(); }
+
+ const point& part(
+ unsigned long idx
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(idx < num_parts(),
+ "\t point full_object_detection::part()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t idx: " << idx
+ << "\n\t num_parts(): " << num_parts()
+ << "\n\t this: " << this
+ );
+ return parts[idx];
+ }
+
+ point& part(
+ unsigned long idx
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(idx < num_parts(),
+ "\t point full_object_detection::part()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t idx: " << idx
+ << "\n\t num_parts(): " << num_parts()
+ << "\n\t this: " << this
+ );
+ return parts[idx];
+ }
+
+ friend void serialize (
+ const full_object_detection& item,
+ std::ostream& out
+ )
+ {
+ int version = 1;
+ serialize(version, out);
+ serialize(item.rect, out);
+ serialize(item.parts, out);
+ }
+
+ friend void deserialize (
+ full_object_detection& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unexpected version encountered while deserializing dlib::full_object_detection.");
+
+ deserialize(item.rect, in);
+ deserialize(item.parts, in);
+ }
+
+ bool operator==(
+ const full_object_detection& rhs
+ ) const
+ {
+ if (rect != rhs.rect)
+ return false;
+ if (parts.size() != rhs.parts.size())
+ return false;
+ for (size_t i = 0; i < parts.size(); ++i)
+ {
+ if (parts[i] != rhs.parts[i])
+ return false;
+ }
+ return true;
+ }
+
+ private:
+ rectangle rect;
+ std::vector<point> parts;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ inline bool all_parts_in_rect (
+ const full_object_detection& obj
+ )
+ {
+ for (unsigned long i = 0; i < obj.num_parts(); ++i)
+ {
+ if (obj.get_rect().contains(obj.part(i)) == false &&
+ obj.part(i) != OBJECT_PART_NOT_PRESENT)
+ return false;
+ }
+ return true;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ struct mmod_rect
+ {
+ mmod_rect() = default;
+ mmod_rect(const rectangle& r) : rect(r) {}
+ mmod_rect(const rectangle& r, double score) : rect(r),detection_confidence(score) {}
+ mmod_rect(const rectangle& r, double score, const std::string& label) : rect(r),detection_confidence(score), label(label) {}
+
+ rectangle rect;
+ double detection_confidence = 0;
+ bool ignore = false;
+ std::string label;
+
+ operator rectangle() const { return rect; }
+ bool operator == (const mmod_rect& rhs) const
+ {
+ return rect == rhs.rect
+ && detection_confidence == rhs.detection_confidence
+ && ignore == rhs.ignore
+ && label == rhs.label;
+ }
+ };
+
+ inline mmod_rect ignored_mmod_rect(const rectangle& r)
+ {
+ mmod_rect temp(r);
+ temp.ignore = true;
+ return temp;
+ }
+
+ inline void serialize(const mmod_rect& item, std::ostream& out)
+ {
+ int version = 2;
+ serialize(version, out);
+ serialize(item.rect, out);
+ serialize(item.detection_confidence, out);
+ serialize(item.ignore, out);
+ serialize(item.label, out);
+ }
+
+ inline void deserialize(mmod_rect& item, std::istream& in)
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1 && version != 2)
+ throw serialization_error("Unexpected version found while deserializing dlib::mmod_rect");
+ deserialize(item.rect, in);
+ deserialize(item.detection_confidence, in);
+ deserialize(item.ignore, in);
+ if (version == 2)
+ deserialize(item.label, in);
+ else
+ item.label = "";
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_FULL_OBJECT_DeTECTION_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/full_object_detection_abstract.h b/ml/dlib/dlib/image_processing/full_object_detection_abstract.h
new file mode 100644
index 000000000..099ee01b0
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/full_object_detection_abstract.h
@@ -0,0 +1,203 @@
+// Copyright (C) 2012 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_FULL_OBJECT_DeTECTION_ABSTRACT_Hh_
+#ifdef DLIB_FULL_OBJECT_DeTECTION_ABSTRACT_Hh_
+
+#include <vector>
+#include "../geometry.h"
+#include "../serialize.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ const static point OBJECT_PART_NOT_PRESENT(0x7FFFFFFF, 0x7FFFFFFF);
+
+// ----------------------------------------------------------------------------------------
+
+ class full_object_detection
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object represents the location of an object in an image along with the
+ positions of each of its constituent parts.
+ !*/
+
+ public:
+
+ full_object_detection(
+ const rectangle& rect,
+ const std::vector<point>& parts
+ );
+ /*!
+ ensures
+ - #get_rect() == rect
+ - #num_parts() == parts.size()
+ - for all valid i:
+ - #part(i) == parts[i]
+ !*/
+
+ full_object_detection(
+ );
+ /*!
+ ensures
+ - #get_rect().is_empty() == true
+ - #num_parts() == 0
+ !*/
+
+ explicit full_object_detection(
+ const rectangle& rect
+ );
+ /*!
+ ensures
+ - #get_rect() == rect
+ - #num_parts() == 0
+ !*/
+
+ const rectangle& get_rect(
+ ) const;
+ /*!
+ ensures
+ - returns the rectangle that indicates where this object is. In general,
+ this should be the bounding box for the object.
+ !*/
+
+ rectangle& get_rect(
+ );
+ /*!
+ ensures
+ - returns the rectangle that indicates where this object is. In general,
+ this should be the bounding box for the object.
+ !*/
+
+ unsigned long num_parts(
+ ) const;
+ /*!
+ ensures
+ - returns the number of parts in this object.
+ !*/
+
+ const point& part(
+ unsigned long idx
+ ) const;
+ /*!
+ requires
+ - idx < num_parts()
+ ensures
+ - returns the location of the center of the idx-th part of this object.
+ Note that it is valid for a part to be "not present". This is indicated
+ when the return value of part() is equal to OBJECT_PART_NOT_PRESENT.
+ This is useful for modeling object parts that are not always observed.
+ !*/
+
+ point& part(
+ unsigned long idx
+ );
+ /*!
+ requires
+ - idx < num_parts()
+ ensures
+ - returns the location of the center of the idx-th part of this object.
+ Note that it is valid for a part to be "not present". This is indicated
+ when the return value of part() is equal to OBJECT_PART_NOT_PRESENT.
+ This is useful for modeling object parts that are not always observed.
+ !*/
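+
+ // A sketch of the common access pattern (illustrative, not from the
+ // original header): skip parts that were not observed when iterating
+ // over a full_object_detection "det":
+ //
+ // for (unsigned long i = 0; i < det.num_parts(); ++i)
+ // {
+ // if (det.part(i) == OBJECT_PART_NOT_PRESENT)
+ // continue; // this part wasn't observed in the image
+ // use_part(det.part(i)); // use_part() is a hypothetical consumer
+ // }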
+
+ bool operator==(
+ const full_object_detection& rhs
+ ) const;
+ /*!
+ ensures
+ - returns true if and only if *this and rhs have identical state.
+ !*/
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ void serialize (
+ const full_object_detection& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ void deserialize (
+ full_object_detection& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ bool all_parts_in_rect (
+ const full_object_detection& obj
+ );
+ /*!
+ ensures
+ - returns true if all the parts in obj are contained within obj.get_rect().
+ That is, returns true if and only if, for all valid i, the following is
+ always true:
+ obj.get_rect().contains(obj.part(i)) == true || obj.part(i) == OBJECT_PART_NOT_PRESENT
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ struct mmod_rect
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This is a simple struct used to give training data to, and to receive
+ detections from, the Max-Margin Object Detection loss layer, loss_mmod_.
+ !*/
+
+ mmod_rect() = default;
+ mmod_rect(const rectangle& r) : rect(r) {}
+ mmod_rect(const rectangle& r, double score) : rect(r),detection_confidence(score) {}
+ mmod_rect(const rectangle& r, double score, const std::string& label) : rect(r),detection_confidence(score),label(label) {}
+
+ rectangle rect;
+ double detection_confidence = 0;
+ bool ignore = false;
+ std::string label;
+
+ operator rectangle() const { return rect; }
+
+ bool operator == (const mmod_rect& rhs) const;
+ /*!
+ ensures
+ - returns true if and only if all the elements of this object compare equal
+ to the corresponding elements of rhs.
+ !*/
+ };
+
+ mmod_rect ignored_mmod_rect(
+ const rectangle& r
+ );
+ /*!
+ ensures
+ - returns a mmod_rect R such that:
+ - R.rect == r
+ - R.ignore == true
+ - R.detection_confidence == 0
+ - R.label == ""
+ !*/
+
+ void serialize(const mmod_rect& item, std::ostream& out);
+ void deserialize(mmod_rect& item, std::istream& in);
+ /*!
+ provides serialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_FULL_OBJECT_DeTECTION_ABSTRACT_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/generic_image.h b/ml/dlib/dlib/image_processing/generic_image.h
new file mode 100644
index 000000000..362277368
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/generic_image.h
@@ -0,0 +1,431 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_GeNERIC_IMAGE_Hh_
+#define DLIB_GeNERIC_IMAGE_Hh_
+
+#include "../assert.h"
+
+namespace dlib
+{
+
+ /*!
+ In dlib, an "image" is any object that implements the generic image interface. In
+ particular, this simply means that an image type (let's refer to it as image_type
+ from here on) has the following seven global functions defined for it:
+ - long num_rows (const image_type& img)
+ - long num_columns (const image_type& img)
+ - void set_image_size( image_type& img, long rows, long cols)
+ - void* image_data ( image_type& img)
+ - const void* image_data (const image_type& img)
+ - long width_step (const image_type& img)
+ - void swap ( image_type& a, image_type& b)
+ And also provides a specialization of the image_traits template that looks like:
+ namespace dlib
+ {
+ template <>
+ struct image_traits<image_type>
+ {
+ typedef the_type_of_pixel_used_in_image_type pixel_type;
+ };
+ }
+
+ Additionally, an image object must be default constructible. This means that
+ expressions of the form:
+ image_type img;
+ must be legal.
+
+ Finally, the type of pixel in image_type must have a pixel_traits specialization.
+ That is, pixel_traits<typename image_traits<image_type>::pixel_type> must be one of
+ the specializations of pixel_traits.
+
+
+ To be very precise, the seven functions listed above are defined as follows:
+
+ long num_rows(
+ const image_type& img
+ );
+ /!*
+ ensures
+ - returns the number of rows in the given image
+ *!/
+
+ long num_columns(
+ const image_type& img
+ );
+ /!*
+ ensures
+ - returns the number of columns in the given image
+ *!/
+
+ void set_image_size(
+ image_type& img,
+ long rows,
+ long cols
+ );
+ /!*
+ requires
+ - rows >= 0 && cols >= 0
+ ensures
+ - num_rows(#img) == rows
+ - num_columns(#img) == cols
+ *!/
+
+ void* image_data(
+ image_type& img
+ );
+ /!*
+ ensures
+ - returns a non-const pointer to the pixel at row and column position 0,0
+ in the given image. Or if the image has zero rows or columns in it
+ then this function returns NULL.
+ - The image lays pixels down in row major order. However, there might
+ be padding at the end of each row. The full row stride in bytes,
+ including any padding, is given by width_step(img).
+ *!/
+
+ const void* image_data(
+ const image_type& img
+ );
+ /!*
+ ensures
+ - returns a const pointer to the pixel at row and column position 0,0 in
+ the given image. Or if the image has zero rows or columns in it then
+ this function returns NULL.
+ - The image lays pixels down in row major order. However, there might
+ be padding at the end of each row. The full row stride in bytes,
+ including any padding, is given by width_step(img).
+ *!/
+
+ long width_step(
+ const image_type& img
+ );
+ /!*
+ ensures
+ - returns the size of one row of the image, in bytes. More precisely,
+ it returns a number N such that: (char*)image_data(img) + N*R == a
+ pointer to the first pixel in the R-th row of the image. This means
+ that the image must lay its pixels down in row major order.
+ *!/
+
+ void swap(
+ image_type& a,
+ image_type& b
+ );
+ /!*
+ ensures
+ - swaps the state of a and b
+ *!/
+ !*/
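+
+ // For illustration only (not part of the original header), a minimal image
+ // type satisfying the above interface could look like the sketch below. The
+ // name "simple_image" and its members are hypothetical, and the function
+ // overloads must be defined in a namespace where dlib can find them (e.g.
+ // the namespace containing the image type, so argument-dependent lookup
+ // applies). Requires <vector> and <utility>:
+ //
+ // struct simple_image
+ // {
+ // std::vector<unsigned char> pixels; // row-major, tightly packed
+ // long rows = 0, cols = 0;
+ // };
+ // long num_rows (const simple_image& img) { return img.rows; }
+ // long num_columns (const simple_image& img) { return img.cols; }
+ // void set_image_size (simple_image& img, long rows, long cols)
+ // { img.rows = rows; img.cols = cols; img.pixels.resize(rows*cols); }
+ // void* image_data (simple_image& img)
+ // { return img.pixels.empty() ? NULL : &img.pixels[0]; }
+ // const void* image_data (const simple_image& img)
+ // { return img.pixels.empty() ? NULL : &img.pixels[0]; }
+ // long width_step (const simple_image& img) { return img.cols; } // 1 byte/pixel, no padding
+ // void swap (simple_image& a, simple_image& b) { std::swap(a,b); }
+ //
+ // namespace dlib { template <> struct image_traits<simple_image>
+ // { typedef unsigned char pixel_type; }; }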
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_type>
+ struct image_traits;
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This is a traits class for generic image objects. You can use it to find out
+ the pixel type contained within an image via an expression of the form:
+ image_traits<image_type>::pixel_type
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// UTILITIES TO MAKE ACCESSING IMAGE PIXELS SIMPLER
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_type
+ >
+ class image_view
+ {
+ /*!
+ REQUIREMENTS ON image_type
+ image_type must be an image object as defined at the top of this file.
+
+ WHAT THIS OBJECT REPRESENTS
+ This object takes an image object and wraps it with an interface that makes
+ it look like a dlib::array2d. That is, it makes it look similar to a
+ regular 2-dimensional C-style array, making code which operates on the
+ pixels simple to read.
+
+ Note that an image_view instance is valid until the image given to its
+ constructor is modified through an interface other than the image_view
+ instance. This is because, for example, someone might cause the underlying
+ image object to reallocate its memory, thus invalidating the pointer to its
+ pixel data stored in the image_view.
+
+ As an aside, the reason why this object stores a pointer to the image
+ object's data and uses that pointer instead of calling image_data() each
+ time a pixel is accessed is to allow for image objects to implement
+ complex, and possibly slow, image_data() functions. For example, an image
+ object might perform some kind of synchronization between a GPU and the
+ host memory during a call to image_data(). Therefore, we call image_data()
+ only in image_view's constructor to avoid the performance penalty of
+ calling it for each pixel access.
+ !*/
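+
+ // Typical use (an illustrative sketch; assumes "img" models the generic
+ // image interface above and holds grayscale, e.g. unsigned char, pixels):
+ //
+ // image_view<image_type> view(img);
+ // for (long r = 0; r < view.nr(); ++r)
+ // for (long c = 0; c < view.nc(); ++c)
+ // view[r][c] = 0; // zero every pixel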
+
+ public:
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+
+ image_view(
+ image_type& img
+ ) :
+ _data((char*)image_data(img)),
+ _width_step(width_step(img)),
+ _nr(num_rows(img)),
+ _nc(num_columns(img)),
+ _img(&img)
+ {}
+
+ long nr() const { return _nr; }
+ /*!
+ ensures
+ - returns the number of rows in this image.
+ !*/
+
+ long nc() const { return _nc; }
+ /*!
+ ensures
+ - returns the number of columns in this image.
+ !*/
+
+ unsigned long size() const { return static_cast<unsigned long>(nr()*nc()); }
+ /*!
+ ensures
+ - returns the number of pixels in this image.
+ !*/
+
+#ifndef ENABLE_ASSERTS
+ pixel_type* operator[] (long row) { return (pixel_type*)(_data+_width_step*row); }
+ /*!
+ requires
+ - 0 <= row < nr()
+ ensures
+ - returns a pointer to the first pixel in the row-th row. Therefore, the
+ pixel at row and column position r,c can be accessed via (*this)[r][c].
+ !*/
+
+ const pixel_type* operator[] (long row) const { return (const pixel_type*)(_data+_width_step*row); }
+ /*!
+ requires
+ - 0 <= row < nr()
+ ensures
+ - returns a const pointer to the first pixel in the row-th row. Therefore,
+ the pixel at row and column position r,c can be accessed via
+ (*this)[r][c].
+ !*/
+#else
+ // If asserts are enabled then we need to return a proxy class so we can make sure
+ // the column accesses don't go out of bounds.
+ struct pix_row
+ {
+ pix_row(pixel_type* data_, long nc_) : data(data_),_nc(nc_) {}
+ const pixel_type& operator[] (long col) const
+ {
+ DLIB_ASSERT(0 <= col && col < _nc,
+ "\t The given column index is out of range."
+ << "\n\t col: " << col
+ << "\n\t _nc: " << _nc);
+ return data[col];
+ }
+ pixel_type& operator[] (long col)
+ {
+ DLIB_ASSERT(0 <= col && col < _nc,
+ "\t The given column index is out of range."
+ << "\n\t col: " << col
+ << "\n\t _nc: " << _nc);
+ return data[col];
+ }
+ private:
+ pixel_type* const data;
+ const long _nc;
+ };
+ pix_row operator[] (long row)
+ {
+ DLIB_ASSERT(0 <= row && row < _nr,
+ "\t The given row index is out of range."
+ << "\n\t row: " << row
+ << "\n\t _nr: " << _nr);
+ return pix_row((pixel_type*)(_data+_width_step*row), _nc);
+ }
+ const pix_row operator[] (long row) const
+ {
+ DLIB_ASSERT(0 <= row && row < _nr,
+ "\t The given row index is out of range."
+ << "\n\t row: " << row
+ << "\n\t _nr: " << _nr);
+ return pix_row((pixel_type*)(_data+_width_step*row), _nc);
+ }
+#endif
+
+ void set_size(long rows, long cols)
+ /*!
+ requires
+ - rows >= 0 && cols >= 0
+ ensures
+ - Tells the underlying image to resize itself to have the given number of
+ rows and columns.
+ - #nr() == rows
+ - #nc() == cols
+ !*/
+ {
+ DLIB_ASSERT((cols >= 0 && rows >= 0),
+ "\t image_view::set_size(long rows, long cols)"
+ << "\n\t The images can't have negative rows or columns."
+ << "\n\t cols: " << cols
+ << "\n\t rows: " << rows
+ );
+ set_image_size(*_img, rows, cols); *this = *_img;
+ }
+
+ void clear() { set_size(0,0); }
+ /*!
+ ensures
+ - sets the image to have 0 pixels in it.
+ !*/
+
+ private:
+
+ char* _data;
+ long _width_step;
+ long _nr;
+ long _nc;
+ image_type* _img;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_type>
+ class const_image_view
+ {
+ /*!
+ REQUIREMENTS ON image_type
+ image_type must be an image object as defined at the top of this file.
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is just like the image_view except that it provides a "const"
+ view into an image. That is, it has the same interface as image_view
+ except that you can't modify the image through a const_image_view.
+ !*/
+
+ public:
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+
+ const_image_view(
+ const image_type& img
+ ) :
+ _data((char*)image_data(img)),
+ _width_step(width_step(img)),
+ _nr(num_rows(img)),
+ _nc(num_columns(img))
+ {}
+
+ long nr() const { return _nr; }
+ long nc() const { return _nc; }
+ unsigned long size() const { return static_cast<unsigned long>(nr()*nc()); }
+#ifndef ENABLE_ASSERTS
+ const pixel_type* operator[] (long row) const { return (const pixel_type*)(_data+_width_step*row); }
+#else
+ // If asserts are enabled then we need to return a proxy class so we can make sure
+ // the column accesses don't go out of bounds.
+ struct pix_row
+ {
+ pix_row(pixel_type* data_, long nc_) : data(data_),_nc(nc_) {}
+ const pixel_type& operator[] (long col) const
+ {
+ DLIB_ASSERT(0 <= col && col < _nc,
+ "\t The given column index is out of range."
+ << "\n\t col: " << col
+ << "\n\t _nc: " << _nc);
+ return data[col];
+ }
+ private:
+ pixel_type* const data;
+ const long _nc;
+ };
+ const pix_row operator[] (long row) const
+ {
+ DLIB_ASSERT(0 <= row && row < _nr,
+ "\t The given row index is out of range."
+ << "\n\t row: " << row
+ << "\n\t _nr: " << _nr);
+ return pix_row((pixel_type*)(_data+_width_step*row), _nc);
+ }
+#endif
+
+ private:
+ const char* _data;
+ long _width_step;
+ long _nr;
+ long _nc;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_type>
+ image_view<image_type> make_image_view ( image_type& img)
+ { return image_view<image_type>(img); }
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined at the
+ top of this file.
+ ensures
+ - constructs an image_view from an image object
+ !*/
+
+ template <typename image_type>
+ const_image_view<image_type> make_image_view (const image_type& img)
+ { return const_image_view<image_type>(img); }
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined at the
+ top of this file.
+ ensures
+ - constructs a const_image_view from an image object
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_type>
+ inline unsigned long image_size(
+ const image_type& img
+ ) { return num_columns(img)*num_rows(img); }
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined at the
+ top of this file.
+ ensures
+ - returns the number of pixels in the given image.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_type>
+ inline long num_rows(
+ const image_type& img
+ ) { return img.nr(); }
+ /*!
+ ensures
+ - By default, try to use the member function .nr() to determine the number
+ of rows in an image. However, as stated at the top of this file, image
+ objects should provide their own overload of num_rows() if needed.
+ !*/
+
+ template <typename image_type>
+ inline long num_columns(
+ const image_type& img
+ ) { return img.nc(); }
+ /*!
+ ensures
+ - By default, try to use the member function .nc() to determine the number
+ of columns in an image. However, as stated at the top of this file, image
+ objects should provide their own overload of num_columns() if needed.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_GeNERIC_IMAGE_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/object_detector.h b/ml/dlib/dlib/image_processing/object_detector.h
new file mode 100644
index 000000000..9f78abd19
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/object_detector.h
@@ -0,0 +1,626 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_OBJECT_DeTECTOR_Hh_
+#define DLIB_OBJECT_DeTECTOR_Hh_
+
+#include "object_detector_abstract.h"
+#include "../geometry.h"
+#include <vector>
+#include "box_overlap_testing.h"
+#include "full_object_detection.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ struct rect_detection
+ {
+ double detection_confidence;
+ unsigned long weight_index;
+ rectangle rect;
+
+ bool operator<(const rect_detection& item) const { return detection_confidence < item.detection_confidence; }
+ };
+
+ struct full_detection
+ {
+ double detection_confidence;
+ unsigned long weight_index;
+ full_object_detection rect;
+
+ bool operator<(const full_detection& item) const { return detection_confidence < item.detection_confidence; }
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_scanner_type>
+ struct processed_weight_vector
+ {
+ processed_weight_vector(){}
+
+ typedef typename image_scanner_type::feature_vector_type feature_vector_type;
+
+ void init (
+ const image_scanner_type&
+ )
+ /*!
+ requires
+ - w has already been assigned its value. Note that the point of this
+ function is to allow an image scanner to overload the
+ processed_weight_vector template and provide some different kind of
+ object as the output of get_detect_argument(). For example, the
+ scan_fhog_pyramid object uses an overload that causes
+ get_detect_argument() to return the special fhog_filterbank object
+ instead of a feature_vector_type. This avoids needing to construct the
+ fhog_filterbank during each call to detect and therefore speeds up
+ detection.
+ !*/
+ {}
+
+ // return the first argument to image_scanner_type::detect()
+ const feature_vector_type& get_detect_argument() const { return w; }
+
+ feature_vector_type w;
+ };
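+
+ // A scanner that wants detect() to receive a preprocessed argument can
+ // specialize this template. Purely illustrative sketch; "my_scanner",
+ // "my_filterbank", and "make_filterbank" are hypothetical names:
+ //
+ // template <> struct processed_weight_vector<my_scanner>
+ // {
+ // typedef my_scanner::feature_vector_type feature_vector_type;
+ // void init (const my_scanner& scanner) { fb = make_filterbank(scanner, w); }
+ // const my_filterbank& get_detect_argument() const { return fb; }
+ // feature_vector_type w;
+ // my_filterbank fb; // precomputed form of w consumed by my_scanner::detect()
+ // };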
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type_
+ >
+ class object_detector
+ {
+ public:
+ typedef image_scanner_type_ image_scanner_type;
+ typedef typename image_scanner_type::feature_vector_type feature_vector_type;
+
+ object_detector (
+ );
+
+ object_detector (
+ const object_detector& item
+ );
+
+ object_detector (
+ const image_scanner_type& scanner_,
+ const test_box_overlap& overlap_tester_,
+ const feature_vector_type& w_
+ );
+
+ object_detector (
+ const image_scanner_type& scanner_,
+ const test_box_overlap& overlap_tester_,
+ const std::vector<feature_vector_type>& w_
+ );
+
+ explicit object_detector (
+ const std::vector<object_detector>& detectors
+ );
+
+ unsigned long num_detectors (
+ ) const { return w.size(); }
+
+ const feature_vector_type& get_w (
+ unsigned long idx = 0
+ ) const { return w[idx].w; }
+
+ const processed_weight_vector<image_scanner_type>& get_processed_w (
+ unsigned long idx = 0
+ ) const { return w[idx]; }
+
+ const test_box_overlap& get_overlap_tester (
+ ) const;
+
+ const image_scanner_type& get_scanner (
+ ) const;
+
+ object_detector& operator= (
+ const object_detector& item
+ );
+
+ template <
+ typename image_type
+ >
+ std::vector<rectangle> operator() (
+ const image_type& img,
+ double adjust_threshold = 0
+ );
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<std::pair<double, rectangle> >& final_dets,
+ double adjust_threshold = 0
+ );
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<std::pair<double, full_object_detection> >& final_dets,
+ double adjust_threshold = 0
+ );
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<full_object_detection>& final_dets,
+ double adjust_threshold = 0
+ );
+
+ // These typedefs are here for backwards compatibility with previous versions of
+ // dlib.
+ typedef ::dlib::rect_detection rect_detection;
+ typedef ::dlib::full_detection full_detection;
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<rect_detection>& final_dets,
+ double adjust_threshold = 0
+ );
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<full_detection>& final_dets,
+ double adjust_threshold = 0
+ );
+
+ template <typename T>
+ friend void serialize (
+ const object_detector<T>& item,
+ std::ostream& out
+ );
+
+ template <typename T>
+ friend void deserialize (
+ object_detector<T>& item,
+ std::istream& in
+ );
+
+ private:
+
+ bool overlaps_any_box (
+ const std::vector<rect_detection>& rects,
+ const dlib::rectangle& rect
+ ) const
+ {
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ if (boxes_overlap(rects[i].rect, rect))
+ return true;
+ }
+ return false;
+ }
+
+ test_box_overlap boxes_overlap;
+ std::vector<processed_weight_vector<image_scanner_type> > w;
+ image_scanner_type scanner;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void serialize (
+ const object_detector<T>& item,
+ std::ostream& out
+ )
+ {
+ int version = 2;
+ serialize(version, out);
+
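+ // Note: only the scanner's configuration is saved here. A default
+ // constructed scanner takes on the configuration via copy_configuration(),
+ // and that copy, which carries no loaded image data, is what gets written.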
+ T scanner;
+ scanner.copy_configuration(item.scanner);
+ serialize(scanner, out);
+ serialize(item.boxes_overlap, out);
+ // serialize all the weight vectors
+ serialize(item.w.size(), out);
+ for (unsigned long i = 0; i < item.w.size(); ++i)
+ serialize(item.w[i].w, out);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void deserialize (
+ object_detector<T>& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version == 1)
+ {
+ deserialize(item.scanner, in);
+ item.w.resize(1);
+ deserialize(item.w[0].w, in);
+ item.w[0].init(item.scanner);
+ deserialize(item.boxes_overlap, in);
+ }
+ else if (version == 2)
+ {
+ deserialize(item.scanner, in);
+ deserialize(item.boxes_overlap, in);
+ unsigned long num_detectors = 0;
+ deserialize(num_detectors, in);
+ item.w.resize(num_detectors);
+ for (unsigned long i = 0; i < item.w.size(); ++i)
+ {
+ deserialize(item.w[i].w, in);
+ item.w[i].init(item.scanner);
+ }
+ }
+ else
+ {
+ throw serialization_error("Unexpected version encountered while deserializing a dlib::object_detector object.");
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// object_detector member functions
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>::
+ object_detector (
+ )
+ {
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>::
+ object_detector (
+ const object_detector& item
+ )
+ {
+ boxes_overlap = item.boxes_overlap;
+ w = item.w;
+ scanner.copy_configuration(item.scanner);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>::
+ object_detector (
+ const image_scanner_type& scanner_,
+ const test_box_overlap& overlap_tester,
+ const feature_vector_type& w_
+ ) :
+ boxes_overlap(overlap_tester)
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(scanner_.get_num_detection_templates() > 0 &&
+ w_.size() == scanner_.get_num_dimensions() + 1,
+ "\t object_detector::object_detector(scanner_,overlap_tester,w_)"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t scanner_.get_num_detection_templates(): " << scanner_.get_num_detection_templates()
+ << "\n\t w_.size(): " << w_.size()
+ << "\n\t scanner_.get_num_dimensions(): " << scanner_.get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ scanner.copy_configuration(scanner_);
+ w.resize(1);
+ w[0].w = w_;
+ w[0].init(scanner);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>::
+ object_detector (
+ const image_scanner_type& scanner_,
+ const test_box_overlap& overlap_tester,
+ const std::vector<feature_vector_type>& w_
+ ) :
+ boxes_overlap(overlap_tester)
+ {
+ // make sure requires clause is not broken
+ DLIB_CASSERT(scanner_.get_num_detection_templates() > 0 && w_.size() > 0,
+ "\t object_detector::object_detector(scanner_,overlap_tester,w_)"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t scanner_.get_num_detection_templates(): " << scanner_.get_num_detection_templates()
+ << "\n\t w_.size(): " << w_.size()
+ << "\n\t this: " << this
+ );
+
+ for (unsigned long i = 0; i < w_.size(); ++i)
+ {
+ DLIB_CASSERT(w_[i].size() == scanner_.get_num_dimensions() + 1,
+ "\t object_detector::object_detector(scanner_,overlap_tester,w_)"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t scanner_.get_num_detection_templates(): " << scanner_.get_num_detection_templates()
+ << "\n\t w_["<<i<<"].size(): " << w_[i].size()
+ << "\n\t scanner_.get_num_dimensions(): " << scanner_.get_num_dimensions()
+ << "\n\t this: " << this
+ );
+ }
+
+ scanner.copy_configuration(scanner_);
+ w.resize(w_.size());
+ for (unsigned long i = 0; i < w.size(); ++i)
+ {
+ w[i].w = w_[i];
+ w[i].init(scanner);
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>::
+ object_detector (
+ const std::vector<object_detector>& detectors
+ )
+ {
+ DLIB_CASSERT(detectors.size() != 0,
+ "\t object_detector::object_detector(detectors)"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t this: " << this
+ );
+ std::vector<feature_vector_type> weights;
+ weights.reserve(detectors.size());
+ for (unsigned long i = 0; i < detectors.size(); ++i)
+ {
+ for (unsigned long j = 0; j < detectors[i].num_detectors(); ++j)
+ weights.push_back(detectors[i].get_w(j));
+ }
+
+ *this = object_detector(detectors[0].get_scanner(), detectors[0].get_overlap_tester(), weights);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ object_detector<image_scanner_type>& object_detector<image_scanner_type>::
+ operator= (
+ const object_detector& item
+ )
+ {
+ if (this == &item)
+ return *this;
+
+ boxes_overlap = item.boxes_overlap;
+ w = item.w;
+ scanner.copy_configuration(item.scanner);
+ return *this;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ void object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ std::vector<rect_detection>& final_dets,
+ double adjust_threshold
+ )
+ {
+ scanner.load(img);
+ std::vector<std::pair<double, rectangle> > dets;
+ std::vector<rect_detection> dets_accum;
+ for (unsigned long i = 0; i < w.size(); ++i)
+ {
+ const double thresh = w[i].w(scanner.get_num_dimensions());
+ scanner.detect(w[i].get_detect_argument(), dets, thresh + adjust_threshold);
+ for (unsigned long j = 0; j < dets.size(); ++j)
+ {
+ rect_detection temp;
+ temp.detection_confidence = dets[j].first-thresh;
+ temp.weight_index = i;
+ temp.rect = dets[j].second;
+ dets_accum.push_back(temp);
+ }
+ }
+
+ // Do non-max suppression
+ final_dets.clear();
+ if (w.size() > 1)
+ std::sort(dets_accum.rbegin(), dets_accum.rend());
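+ // Sorting through reverse iterators leaves dets_accum in descending order
+ // of confidence, so the loop below greedily keeps a detection only when it
+ // doesn't overlap one that was already accepted.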
+ for (unsigned long i = 0; i < dets_accum.size(); ++i)
+ {
+ if (overlaps_any_box(final_dets, dets_accum[i].rect))
+ continue;
+
+ final_dets.push_back(dets_accum[i]);
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ void object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ std::vector<full_detection>& final_dets,
+ double adjust_threshold
+ )
+ {
+ std::vector<rect_detection> dets;
+ (*this)(img,dets,adjust_threshold);
+
+ final_dets.resize(dets.size());
+
+ // convert all the rectangle detections into full_object_detections.
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ {
+ final_dets[i].detection_confidence = dets[i].detection_confidence;
+ final_dets[i].weight_index = dets[i].weight_index;
+ final_dets[i].rect = scanner.get_full_object_detection(dets[i].rect, w[dets[i].weight_index].w);
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ std::vector<rectangle> object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ double adjust_threshold
+ )
+ {
+ std::vector<rect_detection> dets;
+ (*this)(img,dets,adjust_threshold);
+
+ std::vector<rectangle> final_dets(dets.size());
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ final_dets[i] = dets[i].rect;
+
+ return final_dets;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ void object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ std::vector<std::pair<double, rectangle> >& final_dets,
+ double adjust_threshold
+ )
+ {
+ std::vector<rect_detection> dets;
+ (*this)(img,dets,adjust_threshold);
+
+ final_dets.resize(dets.size());
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ final_dets[i] = std::make_pair(dets[i].detection_confidence,dets[i].rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ void object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ std::vector<std::pair<double, full_object_detection> >& final_dets,
+ double adjust_threshold
+ )
+ {
+ std::vector<rect_detection> dets;
+ (*this)(img,dets,adjust_threshold);
+
+ final_dets.clear();
+ final_dets.reserve(dets.size());
+
+ // convert all the rectangle detections into full_object_detections.
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ {
+ final_dets.push_back(std::make_pair(dets[i].detection_confidence,
+ scanner.get_full_object_detection(dets[i].rect, w[dets[i].weight_index].w)));
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ template <
+ typename image_type
+ >
+ void object_detector<image_scanner_type>::
+ operator() (
+ const image_type& img,
+ std::vector<full_object_detection>& final_dets,
+ double adjust_threshold
+ )
+ {
+ std::vector<rect_detection> dets;
+ (*this)(img,dets,adjust_threshold);
+
+ final_dets.clear();
+ final_dets.reserve(dets.size());
+
+ // convert all the rectangle detections into full_object_detections.
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ {
+ final_dets.push_back(scanner.get_full_object_detection(dets[i].rect, w[dets[i].weight_index].w));
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ const test_box_overlap& object_detector<image_scanner_type>::
+ get_overlap_tester (
+ ) const
+ {
+ return boxes_overlap;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ const image_scanner_type& object_detector<image_scanner_type>::
+ get_scanner (
+ ) const
+ {
+ return scanner;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_OBJECT_DeTECTOR_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/object_detector_abstract.h b/ml/dlib/dlib/image_processing/object_detector_abstract.h
new file mode 100644
index 000000000..9578d8b03
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/object_detector_abstract.h
@@ -0,0 +1,404 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_OBJECT_DeTECTOR_ABSTRACT_Hh_
+#ifdef DLIB_OBJECT_DeTECTOR_ABSTRACT_Hh_
+
+#include "../geometry.h"
+#include <vector>
+#include "box_overlap_testing_abstract.h"
+#include "full_object_detection_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ struct rect_detection
+ {
+ double detection_confidence;
+ unsigned long weight_index;
+ rectangle rect;
+ };
+
+ struct full_detection
+ {
+ double detection_confidence;
+ unsigned long weight_index;
+ full_object_detection rect;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type_
+ >
+ class object_detector
+ {
+ /*!
+ REQUIREMENTS ON image_scanner_type_
+ image_scanner_type_ must be an implementation of
+ dlib/image_processing/scan_image_pyramid_abstract.h or
+ dlib/image_processing/scan_fhog_pyramid_abstract.h or
+ dlib/image_processing/scan_image_custom_abstract.h or
+ dlib/image_processing/scan_image_boxes_abstract.h
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for detecting the positions of objects in an image.
+ In particular, it is a simple container to aggregate an instance of an image
+ scanner (i.e. scan_image_pyramid, scan_fhog_pyramid, scan_image_custom, or
+ scan_image_boxes), the weight vector needed by one of these image scanners,
+ and finally an instance of test_box_overlap. The test_box_overlap object
+ is used to perform non-max suppression on the output of the image scanner
+ object.
+
+ Note further that this object can contain multiple weight vectors. In this
+ case, it will run the image scanner multiple times, once with each of the
+ weight vectors. Then it will aggregate the results from all runs, perform
+ non-max suppression and then return the results. Therefore, the object_detector
+ can also be used as a container for a set of object detectors that all use
+ the same image scanner but different weight vectors. This is useful since
+ the object detection procedure has two parts. A loading step where the
+ image is loaded into the scanner, then a detect step which uses the weight
+ vector to locate objects in the image. Since the loading step is independent
+ of the weight vector it is most efficient to run multiple detectors by
+ performing one load into a scanner followed by multiple detect steps. This
+ avoids unnecessarily loading the same image into the scanner multiple times.
+ !*/
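+ /*
+ A minimal usage sketch (not part of the original header; the model file
+ name and the pyramid type are illustrative):
+
+ #include <dlib/image_processing.h>
+ #include <dlib/image_io.h>
+ using namespace dlib;
+
+ typedef scan_fhog_pyramid<pyramid_down<6> > scanner_t;
+ object_detector<scanner_t> detector;
+ deserialize("some_detector.svm") >> detector;
+
+ array2d<unsigned char> img;
+ load_image(img, "image.jpg");
+ // img is loaded into the scanner once, then every weight vector held
+ // by the detector is evaluated against it.
+ std::vector<rectangle> dets = detector(img);
+ */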
+ public:
+ typedef image_scanner_type_ image_scanner_type;
+ typedef typename image_scanner_type::feature_vector_type feature_vector_type;
+
+ object_detector (
+ );
+ /*!
+ ensures
+ - This detector won't generate any detections when
+ presented with an image.
+ - #num_detectors() == 0
+ !*/
+
+ object_detector (
+ const object_detector& item
+ );
+ /*!
+ ensures
+ - #*this is a copy of item
+ - #get_scanner() == item.get_scanner()
+ (note that only the "configuration" of item.get_scanner() is copied.
+ I.e. the copy is done using copy_configuration())
+ !*/
+
+ object_detector (
+ const image_scanner_type& scanner,
+ const test_box_overlap& overlap_tester,
+ const feature_vector_type& w
+ );
+ /*!
+ requires
+ - w.size() == scanner.get_num_dimensions() + 1
+ - scanner.get_num_detection_templates() > 0
+ ensures
+ - When the operator() member function is called it will
+ invoke scanner.detect(w,dets,w(w.size()-1)), suppress
+ overlapping detections, and then report the results.
+ - when #*this is used to detect objects, the set of
+ output detections will never contain any overlaps
+ with respect to overlap_tester. That is, for all
+ pairs of returned detections A and B, we will always
+ have: overlap_tester(A,B) == false
+ - #get_w() == w
+ - #get_overlap_tester() == overlap_tester
+ - #get_scanner() == scanner
+ (note that only the "configuration" of scanner is copied.
+ I.e. the copy is done using copy_configuration())
+ - #num_detectors() == 1
+ !*/
+
+ object_detector (
+ const image_scanner_type& scanner,
+ const test_box_overlap& overlap_tester,
+ const std::vector<feature_vector_type>& w
+ );
+ /*!
+ requires
+ - for all valid i:
+ - w[i].size() == scanner.get_num_dimensions() + 1
+ - scanner.get_num_detection_templates() > 0
+ - w.size() > 0
+ ensures
+ - When the operator() member function is called it will invoke
+ get_scanner().detect(w[i],dets,w[i](w[i].size()-1)) for all valid i. Then it
+ will take all the detections output by the calls to detect() and suppress
+ overlapping detections, and finally report the results.
+ - when #*this is used to detect objects, the set of output detections will
+ never contain any overlaps with respect to overlap_tester. That is, for
+ all pairs of returned detections A and B, we will always have:
+ overlap_tester(A,B) == false
+ - for all valid i:
+ - #get_w(i) == w[i]
+ - #num_detectors() == w.size()
+ - #get_overlap_tester() == overlap_tester
+ - #get_scanner() == scanner
+ (note that only the "configuration" of scanner is copied.
+ I.e. the copy is done using copy_configuration())
+ !*/
+
+ explicit object_detector (
+ const std::vector<object_detector>& detectors
+ );
+ /*!
+ requires
+ - detectors.size() != 0
+ - All the detectors must use compatibly configured scanners. That is, it
+ must make sense for the weight vector from one detector to be used with
+ the scanner from any other.
+ - for all valid i:
+ - detectors[i].get_scanner().get_num_dimensions() == detectors[0].get_scanner().get_num_dimensions()
+ (i.e. all the detectors use scanners that use the same kind of feature vectors.)
+ ensures
+ - Very much like the above constructor, this constructor takes all the
+ given detectors and packs them into #*this. That is, invoking operator()
+ on #*this will run all the detectors, perform non-max suppression, and
+ then report the results.
+ - When #*this is used to detect objects, the set of output detections will
+ never contain any overlaps with respect to overlap_tester. That is, for
+ all pairs of returned detections A and B, we will always have:
+ overlap_tester(A,B) == false
+ - #num_detectors() == The sum of detectors[i].num_detectors() for all valid i.
+ - #get_overlap_tester() == detectors[0].get_overlap_tester()
+ - #get_scanner() == detectors[0].get_scanner()
+ (note that only the "configuration" of scanner is copied. I.e. the copy
+ is done using copy_configuration())
+ !*/
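+ /*
+ For example, two separately trained detectors (the names "frontal" and
+ "profile" are illustrative) can be packed together so an image is only
+ loaded into the scanner once:
+
+ std::vector<object_detector<scanner_t> > parts;
+ parts.push_back(frontal);
+ parts.push_back(profile);
+ object_detector<scanner_t> combined(parts);
+ */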
+
+ unsigned long num_detectors (
+ ) const;
+ /*!
+ ensures
+ - returns the number of weight vectors in this object. Since each weight
+ vector logically represents an object detector, this returns the number
+ of object detectors contained in this object.
+ !*/
+
+ const feature_vector_type& get_w (
+ unsigned long idx = 0
+ ) const;
+ /*!
+ requires
+ - idx < num_detectors()
+ ensures
+ - returns the idx-th weight vector loaded into this object. All the weight vectors
+ have the same dimension and logically each represents a different detector.
+ !*/
+
+ const test_box_overlap& get_overlap_tester (
+ ) const;
+ /*!
+ ensures
+ - returns the overlap tester used by this object
+ !*/
+
+ const image_scanner_type& get_scanner (
+ ) const;
+ /*!
+ ensures
+ - returns the image scanner used by this object.
+ !*/
+
+ object_detector& operator= (
+ const object_detector& item
+ );
+ /*!
+ ensures
+ - #*this is a copy of item
+ - #get_scanner() == item.get_scanner()
+ (note that only the "configuration" of item.get_scanner() is
+ copied. I.e. the copy is done using copy_configuration())
+ - returns #*this
+ !*/
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<rect_detection>& dets,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - Performs object detection on the given image and stores the detected
+ objects into #dets. In particular, we will have that:
+ - #dets is sorted such that the highest confidence detections come
+ first. E.g. element 0 is the best detection, element 1 the next
+ best, and so on.
+ - #dets.size() == the number of detected objects.
+ - #dets[i].detection_confidence == The strength of the i-th detection.
+ Larger values indicate that the detector is more confident that
+ #dets[i] is a correct detection rather than being a false alarm.
+ Moreover, the detection_confidence is equal to the detection value
+ output by the scanner minus the threshold value stored at the end of
+ the weight vector in get_w(#dets[i].weight_index).
+ - #dets[i].weight_index == the index for the weight vector that
+ generated this detection.
+ - #dets[i].rect == the bounding box for the i-th detection.
+ - #get_scanner() will have been loaded with img. Therefore, you can call
+ #get_scanner().get_feature_vector() to obtain the feature vectors or
+ #get_scanner().get_full_object_detection() to get the
+ full_object_detections for the resulting object detection boxes.
+ - The detection threshold is adjusted by having adjust_threshold added to
+ it. Therefore, an adjust_threshold value > 0 makes detecting objects
+ harder while a negative value makes it easier. Moreover, the following
+ will be true for all valid i:
+ - #dets[i].detection_confidence >= adjust_threshold
+ This means that, for example, you can obtain the maximum possible number
+ of detections by setting adjust_threshold equal to negative infinity.
+ !*/
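+ /*
+ For example (a sketch reusing the detector and img from the usage
+ sketch above):
+
+ std::vector<rect_detection> dets;
+ detector(img, dets, -0.5); // negative adjust_threshold => more detections
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ std::cout << dets[i].detection_confidence << " "
+ << dets[i].weight_index << " "
+ << dets[i].rect << std::endl;
+ */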
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<full_detection>& dets,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - This function is identical to the above operator() routine, except that
+ it outputs full_object_detections instead of rectangles. This means that
+ the output includes part locations. In particular, calling this function
+ is the same as calling the above operator() routine and then using
+ get_scanner().get_full_object_detection() to resolve all the rectangles
+ into full_object_detections. Therefore, this version of operator() is
+ simply a convenience function for performing this set of operations.
+ !*/
+
+ template <
+ typename image_type
+ >
+ std::vector<rectangle> operator() (
+ const image_type& img,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - This function is identical to the above operator() routine, except that
+ it returns a std::vector<rectangle> which contains just the bounding
+ boxes of all the detections.
+ !*/
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<std::pair<double, rectangle> >& dets,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - performs object detection on the given image and stores the
+ detected objects into #dets. In particular, we will have that:
+ - #dets is sorted such that the highest confidence detections
+ come first. E.g. element 0 is the best detection, element 1
+ the next best, and so on.
+ - #dets.size() == the number of detected objects.
+ - #dets[i].first gives the "detection confidence" of the i-th
+ detection. This is the detection value output by the scanner minus
+ the threshold value stored at the end of the weight vector in get_w().
+ - #dets[i].second == the bounding box for the i-th detection.
+ - #get_scanner() will have been loaded with img. Therefore, you can call
+ #get_scanner().get_feature_vector() to obtain the feature vectors or
+ #get_scanner().get_full_object_detection() to get the
+ full_object_detections for the resulting object detection boxes.
+ - The detection threshold is adjusted by having adjust_threshold added to
+ it. Therefore, an adjust_threshold value > 0 makes detecting objects
+ harder while a negative value makes it easier. Moreover, the following
+ will be true for all valid i:
+ - #dets[i].first >= adjust_threshold
+ This means that, for example, you can obtain the maximum possible number
+ of detections by setting adjust_threshold equal to negative infinity.
+ !*/
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<std::pair<double, full_object_detection> >& dets,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - This function is identical to the above operator() routine, except that
+ it outputs full_object_detections instead of rectangles. This means that
+ the output includes part locations. In particular, calling this function
+ is the same as calling the above operator() routine and then using
+ get_scanner().get_full_object_detection() to resolve all the rectangles
+ into full_object_detections. Therefore, this version of operator() is
+ simply a convenience function for performing this set of operations.
+ !*/
+
+ template <
+ typename image_type
+ >
+ void operator() (
+ const image_type& img,
+ std::vector<full_object_detection>& dets,
+ double adjust_threshold = 0
+ );
+ /*!
+ requires
+ - img == an object which can be accepted by image_scanner_type::load()
+ ensures
+ - This function is identical to the above operator() routine, except that
+ it doesn't include a double valued score. That is, it just outputs the
+ full_object_detections.
+ !*/
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void serialize (
+ const object_detector<T>& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support. Note that this function only saves the
+ configuration part of item.get_scanner(). That is, we use the scanner's
+ copy_configuration() function to get a copy of the scanner that doesn't contain any
+ loaded image data and we then save just the configuration part of the scanner.
+ This means that any serialized object_detectors won't remember any images they have
+ processed but will otherwise contain all their state and be able to detect objects
+ in new images.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void deserialize (
+ object_detector<T>& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
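+ /*
+ A round-trip sketch (the file name is illustrative):
+
+ object_detector<scanner_t> detector; // trained elsewhere
+ serialize("detector.svm") << detector; // save
+ object_detector<scanner_t> loaded;
+ deserialize("detector.svm") >> loaded; // restore
+ */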
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_OBJECT_DeTECTOR_ABSTRACT_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles.h b/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles.h
new file mode 100644
index 000000000..95ab4f353
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles.h
@@ -0,0 +1,317 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_Hh_
+#define DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_Hh_
+
+#include "remove_unobtainable_rectangles_abstract.h"
+#include "scan_image_pyramid.h"
+#include "scan_image_boxes.h"
+#include "scan_image_custom.h"
+#include "scan_fhog_pyramid.h"
+#include "../svm/structural_object_detection_trainer.h"
+#include "../geometry.h"
+
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ inline bool matches_rect (
+ const std::vector<rectangle>& rects,
+ const rectangle& rect,
+ const double eps
+ )
+ {
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ const double score = (rect.intersect(rects[i])).area()/(double)(rect+rects[i]).area();
+ if (score > eps)
+ return true;
+ }
+
+ return false;
+ }
+
+ inline rectangle get_best_matching_rect (
+ const std::vector<rectangle>& rects,
+ const rectangle& rect
+ )
+ {
+ double best_score = -1;
+ rectangle best_rect;
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ const double score = (rect.intersect(rects[i])).area()/(double)(rect+rects[i]).area();
+ if (score > best_score)
+ {
+ best_score = score;
+ best_rect = rects[i];
+ }
+ }
+ return best_rect;
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type,
+ typename image_scanner_type
+ >
+ std::vector<std::vector<rectangle> > pyramid_remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<image_scanner_type>& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ using namespace dlib::impl;
+ // make sure requires clause is not broken
+ DLIB_ASSERT(images.size() == object_locations.size(),
+ "\t std::vector<std::vector<rectangle>> remove_unobtainable_rectangles()"
+ << "\n\t Invalid inputs were given to this function."
+ );
+
+
+ std::vector<std::vector<rectangle> > rejects(images.size());
+
+ // If the trainer is setup to automatically fit the overlap tester to the data then
+ // we should use the loosest possible overlap tester here. Otherwise we should use
+ // the tester the trainer will use.
+ test_box_overlap boxes_overlap(0.9999999,1);
+ if (!trainer.auto_set_overlap_tester())
+ boxes_overlap = trainer.get_overlap_tester();
+
+ for (unsigned long k = 0; k < images.size(); ++k)
+ {
+ std::vector<rectangle> objs = object_locations[k];
+
+ // First remove things that don't have any matches with the candidate object
+ // locations.
+ std::vector<rectangle> good_rects;
+ for (unsigned long j = 0; j < objs.size(); ++j)
+ {
+ const rectangle rect = trainer.get_scanner().get_best_matching_rect(objs[j]);
+ const double score = (objs[j].intersect(rect)).area()/(double)(objs[j] + rect).area();
+ if (score > trainer.get_match_eps())
+ good_rects.push_back(objs[j]);
+ else
+ rejects[k].push_back(objs[j]);
+ }
+ object_locations[k] = good_rects;
+
+
+ // Remap these rectangles to the ones that can come out of the scanner. That
+ // way when we compare them to each other in the following loop we will know if
+ // any distinct truth rectangles get mapped to overlapping boxes.
+ objs.resize(good_rects.size());
+ for (unsigned long i = 0; i < good_rects.size(); ++i)
+ objs[i] = trainer.get_scanner().get_best_matching_rect(good_rects[i]);
+
+ good_rects.clear();
+ // now check for truth rects that are too close together.
+ for (unsigned long i = 0; i < objs.size(); ++i)
+ {
+ // check if objs[i] hits another box
+ bool hit_box = false;
+ for (unsigned long j = i+1; j < objs.size(); ++j)
+ {
+ if (boxes_overlap(objs[i], objs[j]))
+ {
+ hit_box = true;
+ break;
+ }
+ }
+ if (hit_box)
+ rejects[k].push_back(object_locations[k][i]);
+ else
+ good_rects.push_back(object_locations[k][i]);
+ }
+ object_locations[k] = good_rects;
+ }
+
+ return rejects;
+ }
+
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type,
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<scan_image_pyramid<Pyramid_type, Feature_extractor_type> >& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ return impl::pyramid_remove_unobtainable_rectangles(trainer, images, object_locations);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type,
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<scan_fhog_pyramid<Pyramid_type,Feature_extractor_type> >& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ return impl::pyramid_remove_unobtainable_rectangles(trainer, images, object_locations);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ template <
+ typename image_array_type,
+ typename scanner_type,
+ typename get_boxes_functor
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ get_boxes_functor& bg,
+ const structural_object_detection_trainer<scanner_type>& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ using namespace dlib::impl;
+ // make sure requires clause is not broken
+ DLIB_ASSERT(images.size() == object_locations.size(),
+ "\t std::vector<std::vector<rectangle>> remove_unobtainable_rectangles()"
+ << "\n\t Invalid inputs were given to this function."
+ );
+
+ std::vector<rectangle> rects;
+
+ std::vector<std::vector<rectangle> > rejects(images.size());
+
+ // If the trainer is setup to automatically fit the overlap tester to the data then
+ // we should use the loosest possible overlap tester here. Otherwise we should use
+ // the tester the trainer will use.
+ test_box_overlap boxes_overlap(0.9999999,1);
+ if (!trainer.auto_set_overlap_tester())
+ boxes_overlap = trainer.get_overlap_tester();
+
+ for (unsigned long k = 0; k < images.size(); ++k)
+ {
+ std::vector<rectangle> objs = object_locations[k];
+ // Don't even bother computing the candidate rectangles if there aren't any
+ // object locations for this image since there isn't anything to do anyway.
+ if (objs.size() == 0)
+ continue;
+
+ bg(images[k], rects);
+
+
+ // First remove things that don't have any matches with the candidate object
+ // locations.
+ std::vector<rectangle> good_rects;
+ for (unsigned long j = 0; j < objs.size(); ++j)
+ {
+ if (matches_rect(rects, objs[j], trainer.get_match_eps()))
+ good_rects.push_back(objs[j]);
+ else
+ rejects[k].push_back(objs[j]);
+ }
+ object_locations[k] = good_rects;
+
+
+ // Remap these rectangles to the ones that can come out of the scanner. That
+ // way when we compare them to each other in the following loop we will know if
+ // any distinct truth rectangles get mapped to overlapping boxes.
+ objs.resize(good_rects.size());
+ for (unsigned long i = 0; i < good_rects.size(); ++i)
+ objs[i] = get_best_matching_rect(rects, good_rects[i]);
+
+ good_rects.clear();
+ // now check for truth rects that are too close together.
+ for (unsigned long i = 0; i < objs.size(); ++i)
+ {
+ // check if objs[i] hits another box
+ bool hit_box = false;
+ for (unsigned long j = i+1; j < objs.size(); ++j)
+ {
+ if (boxes_overlap(objs[i], objs[j]))
+ {
+ hit_box = true;
+ break;
+ }
+ }
+ if (hit_box)
+ rejects[k].push_back(object_locations[k][i]);
+ else
+ good_rects.push_back(object_locations[k][i]);
+ }
+ object_locations[k] = good_rects;
+ }
+
+ return rejects;
+ }
+
+ // ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ struct load_to_functor
+ {
+ load_to_functor(T& obj_) : obj(obj_) {}
+ T& obj;
+
+ template <typename U, typename V>
+ void operator()(const U& u, V& v)
+ {
+ obj.load(u,v);
+ }
+ };
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type,
+ typename feature_extractor,
+ typename box_generator
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<scan_image_boxes<feature_extractor, box_generator> >& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ box_generator bg = trainer.get_scanner().get_box_generator();
+ return impl::remove_unobtainable_rectangles(bg, trainer, images, object_locations);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type,
+ typename feature_extractor
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<scan_image_custom<feature_extractor> >& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ )
+ {
+ feature_extractor fe;
+ fe.copy_configuration(trainer.get_scanner().get_feature_extractor());
+ impl::load_to_functor<feature_extractor> bg(fe);
+ return impl::remove_unobtainable_rectangles(bg, trainer, images, object_locations);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles_abstract.h b/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles_abstract.h
new file mode 100644
index 000000000..328326f1c
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/remove_unobtainable_rectangles_abstract.h
@@ -0,0 +1,56 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_ABSTRACT_Hh_
+#ifdef DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_ABSTRACT_Hh_
+
+#include "scan_image_pyramid_abstract.h"
+#include "scan_image_boxes_abstract.h"
+#include "scan_image_custom_abstract.h"
+#include "scan_fhog_pyramid_abstract.h"
+#include "../svm/structural_object_detection_trainer_abstract.h"
+#include "../geometry.h"
+
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type,
+ typename image_array_type
+ >
+ std::vector<std::vector<rectangle> > remove_unobtainable_rectangles (
+ const structural_object_detection_trainer<image_scanner_type>& trainer,
+ const image_array_type& images,
+ std::vector<std::vector<rectangle> >& object_locations
+ );
+ /*!
+ requires
+ - image_scanner_type must be either scan_image_boxes, scan_image_pyramid,
+ scan_image_custom, or scan_fhog_pyramid.
+ - images.size() == object_locations.size()
+ ensures
+ - Recall that the image scanner objects can't produce all possible rectangles
+ as object detections since they only consider a limited subset of all possible
+ object positions. Moreover, the structural_object_detection_trainer requires
+ its input training data to not contain any object positions which are unobtainable
+ by its scanner object. Therefore, remove_unobtainable_rectangles() is a tool
+ to filter out these unobtainable rectangles from the training data before giving
+ it to a structural_object_detection_trainer.
+ - This function interprets object_locations[i] as the set of object positions for
+ image[i], for all valid i.
+ - In particular, this function removes unobtainable rectangles from object_locations
+ and also returns a vector V such that:
+ - V.size() == object_locations.size()
+ - for all valid i:
+ - V[i] == the set of rectangles removed from object_locations[i]
+ !*/
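+ /*
+ Typical use, sketched (images, boxes, scanner, and scanner_t are assumed
+ to be set up already; scanner_t names any supported scanner type):
+
+ structural_object_detection_trainer<scanner_t> trainer(scanner);
+ std::vector<std::vector<rectangle> > removed =
+ remove_unobtainable_rectangles(trainer, images, boxes);
+ // boxes now contains only rectangles the scanner can produce, and
+ // removed[i] lists everything filtered out of boxes[i].
+ object_detector<scanner_t> detector = trainer.train(images, boxes);
+ */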
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_REMOVE_UnOBTAINABLE_RECTANGLES_ABSTRACT_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/render_face_detections.h b/ml/dlib/dlib/image_processing/render_face_detections.h
new file mode 100644
index 000000000..96ff8971f
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/render_face_detections.h
@@ -0,0 +1,99 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_RENDER_FACE_DeTECTIONS_H_
+#define DLIB_RENDER_FACE_DeTECTIONS_H_
+
+#include "full_object_detection.h"
+#include "../gui_widgets.h"
+#include "render_face_detections_abstract.h"
+#include <vector>
+
+namespace dlib
+{
+ inline std::vector<image_window::overlay_line> render_face_detections (
+ const std::vector<full_object_detection>& dets,
+ const rgb_pixel color = rgb_pixel(0,255,0)
+ )
+ {
+ std::vector<image_window::overlay_line> lines;
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ {
+ DLIB_CASSERT(dets[i].num_parts() == 68 || dets[i].num_parts() == 5,
+ "\t std::vector<image_window::overlay_line> render_face_detections()"
+ << "\n\t You have to give either a 5 point or 68 point face landmarking output to this function. "
+ << "\n\t dets["<<i<<"].num_parts(): " << dets[i].num_parts()
+ );
+
+ const full_object_detection& d = dets[i];
+
+ if (d.num_parts() == 5)
+ {
+ lines.push_back(image_window::overlay_line(d.part(0), d.part(1), color));
+ lines.push_back(image_window::overlay_line(d.part(1), d.part(4), color));
+ lines.push_back(image_window::overlay_line(d.part(4), d.part(3), color));
+ lines.push_back(image_window::overlay_line(d.part(3), d.part(2), color));
+ }
+ else
+ {
+ // Around Chin. Ear to Ear
+ for (unsigned long i = 1; i <= 16; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+
+ // Line on top of nose
+ for (unsigned long i = 28; i <= 30; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+
+ // left eyebrow
+ for (unsigned long i = 18; i <= 21; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ // Right eyebrow
+ for (unsigned long i = 23; i <= 26; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ // Bottom part of the nose
+ for (unsigned long i = 31; i <= 35; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ // Line from the nose to the bottom part above
+ lines.push_back(image_window::overlay_line(d.part(30), d.part(35), color));
+
+ // Left eye
+ for (unsigned long i = 37; i <= 41; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ lines.push_back(image_window::overlay_line(d.part(36), d.part(41), color));
+
+ // Right eye
+ for (unsigned long i = 43; i <= 47; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ lines.push_back(image_window::overlay_line(d.part(42), d.part(47), color));
+
+ // Lips outer part
+ for (unsigned long i = 49; i <= 59; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ lines.push_back(image_window::overlay_line(d.part(48), d.part(59), color));
+
+ // Lips inside part
+ for (unsigned long i = 61; i <= 67; ++i)
+ lines.push_back(image_window::overlay_line(d.part(i), d.part(i-1), color));
+ lines.push_back(image_window::overlay_line(d.part(60), d.part(67), color));
+ }
+ }
+ return lines;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<image_window::overlay_line> render_face_detections (
+ const full_object_detection& det,
+ const rgb_pixel color = rgb_pixel(0,255,0)
+ )
+ {
+ std::vector<full_object_detection> dets;
+ dets.push_back(det);
+ return render_face_detections(dets, color);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_RENDER_FACE_DeTECTIONS_H_
+
diff --git a/ml/dlib/dlib/image_processing/render_face_detections_abstract.h b/ml/dlib/dlib/image_processing/render_face_detections_abstract.h
new file mode 100644
index 000000000..f609c8e8c
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/render_face_detections_abstract.h
@@ -0,0 +1,59 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_RENDER_FACE_DeTECTIONS_ABSTRACT_H_
+#ifdef DLIB_RENDER_FACE_DeTECTIONS_ABSTRACT_H_
+
+#include "full_object_detection_abstract.h"
+#include "../gui_widgets.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<image_window::overlay_line> render_face_detections (
+ const std::vector<full_object_detection>& dets,
+ const rgb_pixel color = rgb_pixel(0,255,0)
+ );
+ /*!
+ requires
+ - for all valid i:
+ - dets[i].num_parts() == 68 || dets[i].num_parts() == 5
+ ensures
+ - Interprets the given objects as face detections with parts annotated using
+ either the iBUG face landmark scheme or a 5 point face annotation. We then
+ return a set of overlay lines that will draw the objects onto the screen in a
+ way that properly draws the outline of the face features defined by the part
+ locations.
+ - returns a vector with dets.size() elements, each containing the lines
+ necessary to render a face detection from dets.
+ - The 5 point face annotation scheme is assumed to be:
+ - det part 0 == left eye corner, outside part of eye.
+ - det part 1 == left eye corner, inside part of eye.
+ - det part 2 == right eye corner, outside part of eye.
+ - det part 3 == right eye corner, inside part of eye.
+ - det part 4 == immediately under the nose, right at the top of the philtrum.
+ !*/
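+ /*
+ For example (a sketch; detector, sp, and img stand for an already loaded
+ face detector, shape_predictor, and image):
+
+ std::vector<rectangle> faces = detector(img);
+ std::vector<full_object_detection> shapes;
+ for (unsigned long i = 0; i < faces.size(); ++i)
+ shapes.push_back(sp(img, faces[i]));
+
+ image_window win(img);
+ win.add_overlay(render_face_detections(shapes));
+ */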
+
+// ----------------------------------------------------------------------------------------
+
+ inline std::vector<image_window::overlay_line> render_face_detections (
+ const full_object_detection& det,
+ const rgb_pixel color = rgb_pixel(0,255,0)
+ );
+ /*!
+ requires
+ - det.num_parts() == 68 || det.num_parts() == 5
+ ensures
+ - This function is identical to the above render_face_detections() routine
+ except that it takes just a single full_object_detection instead of a
+ std::vector of them.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_RENDER_FACE_DeTECTIONS_ABSTRACT_H_
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_fhog_pyramid.h b/ml/dlib/dlib/image_processing/scan_fhog_pyramid.h
new file mode 100644
index 000000000..5ae0310af
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_fhog_pyramid.h
@@ -0,0 +1,1348 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_fHOG_PYRAMID_Hh_
+#define DLIB_SCAN_fHOG_PYRAMID_Hh_
+
+#include "scan_fhog_pyramid_abstract.h"
+#include "../matrix.h"
+#include "../image_transforms.h"
+#include "../array.h"
+#include "../array2d.h"
+#include "object_detector.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class default_fhog_feature_extractor
+ {
+ public:
+ inline rectangle image_to_feats (
+ const rectangle& rect,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const
+ {
+ return image_to_fhog(rect, cell_size, filter_rows_padding, filter_cols_padding);
+ }
+
+ inline rectangle feats_to_image (
+ const rectangle& rect,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const
+ {
+ return fhog_to_image(rect, cell_size, filter_rows_padding, filter_cols_padding);
+ }
+
+ template <
+ typename image_type
+ >
+ void operator()(
+ const image_type& img,
+ dlib::array<array2d<float> >& hog,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const
+ {
+ extract_fhog_features(img,hog,cell_size,filter_rows_padding,filter_cols_padding);
+ }
+
+ inline unsigned long get_num_planes (
+ ) const
+ {
+ return 31;
+ }
+ };
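+ // Note: the 31 planes produced by the default extractor are the standard
+ // Felzenszwalb HOG decomposition: 18 contrast sensitive orientation
+ // channels, 9 contrast insensitive orientation channels, and 4 gradient
+ // energy (texture) features.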
+
+ inline void serialize (const default_fhog_feature_extractor&, std::ostream&) {}
+ inline void deserialize (default_fhog_feature_extractor&, std::istream&) {}
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type = default_fhog_feature_extractor
+ >
+ class scan_fhog_pyramid : noncopyable
+ {
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+
+ typedef Pyramid_type pyramid_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_fhog_pyramid (
+ );
+
+ explicit scan_fhog_pyramid (
+ const feature_extractor_type& fe_
+ );
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+
+ inline bool is_loaded_with_image (
+ ) const;
+
+ inline void copy_configuration (
+ const scan_fhog_pyramid& item
+ );
+
+ void set_detection_window_size (
+ unsigned long width,
+ unsigned long height
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(width > 0 && height > 0,
+ "\t void scan_fhog_pyramid::set_detection_window_size()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t width: " << width
+ << "\n\t height: " << height
+ << "\n\t this: " << this
+ );
+
+ window_width = width;
+ window_height = height;
+ feats.clear();
+ }
+
+ inline unsigned long get_detection_window_width (
+ ) const { return window_width; }
+ inline unsigned long get_detection_window_height (
+ ) const { return window_height; }
+
+ inline unsigned long get_num_detection_templates (
+ ) const;
+
+ inline unsigned long get_num_movable_components_per_detection_template (
+ ) const;
+
+ void set_padding (
+ unsigned long new_padding
+ )
+ {
+ padding = new_padding;
+ feats.clear();
+ }
+
+ unsigned long get_padding (
+ ) const { return padding; }
+
+ void set_cell_size (
+ unsigned long new_cell_size
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(new_cell_size > 0 ,
+ "\t void scan_fhog_pyramid::set_cell_size()"
+ << "\n\t You can't have zero sized fHOG cells. "
+ << "\n\t this: " << this
+ );
+
+ cell_size = new_cell_size;
+ feats.clear();
+ }
+
+ unsigned long get_cell_size (
+ ) const { return cell_size; }
+
+ inline long get_num_dimensions (
+ ) const;
+
+ unsigned long get_max_pyramid_levels (
+ ) const;
+
+ const feature_extractor_type& get_feature_extractor(
+ ) const { return fe; }
+
+ void set_max_pyramid_levels (
+ unsigned long max_levels
+ );
+
+ void set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ );
+
+ inline unsigned long get_min_pyramid_layer_width (
+ ) const;
+
+ inline unsigned long get_min_pyramid_layer_height (
+ ) const;
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ w.size() >= get_num_dimensions(),
+ "\t void scan_fhog_pyramid::detect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t w.size(): " << w.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ fhog_filterbank temp = build_fhog_filterbank(w);
+ detect(temp, dets, thresh);
+ }
+
+ class fhog_filterbank
+ {
+ friend class scan_fhog_pyramid;
+ public:
+ inline long get_num_dimensions() const
+ {
+ unsigned long dims = 0;
+ for (unsigned long i = 0; i < filters.size(); ++i)
+ {
+ dims += filters[i].size();
+ }
+ return dims;
+ }
+
+ const std::vector<matrix<float> >& get_filters() const { return filters;}
+
+ unsigned long num_separable_filters() const
+ {
+ unsigned long num = 0;
+ for (unsigned long i = 0; i < row_filters.size(); ++i)
+ {
+ num += row_filters[i].size();
+ }
+ return num;
+ }
+
+ std::vector<matrix<float> > filters;
+ std::vector<std::vector<matrix<float,0,1> > > row_filters, col_filters;
+ };
+
+ fhog_filterbank build_fhog_filterbank (
+ const feature_vector_type& weights
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(weights.size() >= get_num_dimensions(),
+ "\t fhog_filterbank scan_fhog_pyramid::build_fhog_filterbank()"
+ << "\n\t The number of weights isn't enough to fill out the filterbank. "
+ << "\n\t weights.size(): " << weights.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ fhog_filterbank temp;
+ temp.filters.resize(fe.get_num_planes());
+ temp.row_filters.resize(fe.get_num_planes());
+ temp.col_filters.resize(fe.get_num_planes());
+
+ // load filters from w
+ unsigned long width, height;
+ compute_fhog_window_size(width, height);
+ const long size = width*height;
+ for (unsigned long i = 0; i < temp.filters.size(); ++i)
+ {
+ matrix<double> u,v,w,f;
+ f = reshape(rowm(weights, range(i*size, (i+1)*size-1)), height, width);
+ temp.filters[i] = matrix_cast<float>(f);
+
+ svd3(f, u,w,v);
+
+ matrix<double> w2 = w;
+ rsort_columns(u,w);
+ rsort_columns(v,w2);
+
+ double thresh = std::max(1e-4, max(w)*0.001);
+ w = round_zeros(w, thresh);
+
+
+ for (long j = 0; j < w.size(); ++j)
+ {
+ if (w(j) != 0)
+ {
+ temp.col_filters[i].push_back(matrix_cast<float>(colm(u,j)*std::sqrt(w(j))));
+ temp.row_filters[i].push_back(matrix_cast<float>(colm(v,j)*std::sqrt(w(j))));
+ }
+ }
+ }
+
+ return temp;
+ }
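+ // Note on the SVD step above: it rewrites each 2D filter F as a sum of
+ // rank-1 terms, F = sum_j colm(u,j)*w(j)*trans(colm(v,j)), and stores each
+ // term as a column filter colm(u,j)*sqrt(w(j)) paired with a row filter
+ // colm(v,j)*sqrt(w(j)). Convolving with a rank-1 term costs O(rows+cols)
+ // per output pixel instead of O(rows*cols), which is what makes the
+ // separable path in apply_filters_to_fhog() worthwhile.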
+
+ void detect (
+ const fhog_filterbank& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+
+ double get_nuclear_norm_regularization_strength (
+ ) const { return nuclear_norm_regularization_strength; }
+
+ void set_nuclear_norm_regularization_strength (
+ double strength
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(strength >= 0 ,
+ "\t void scan_fhog_pyramid::set_nuclear_norm_regularization_strength()"
+ << "\n\t You can't have a negative regularization strength."
+ << "\n\t strength: " << strength
+ << "\n\t this: " << this
+ );
+
+ nuclear_norm_regularization_strength = strength;
+ }
+
+ unsigned long get_fhog_window_width (
+ ) const
+ {
+ unsigned long width, height;
+ compute_fhog_window_size(width, height);
+ return width;
+ }
+
+ unsigned long get_fhog_window_height (
+ ) const
+ {
+ unsigned long width, height;
+ compute_fhog_window_size(width, height);
+ return height;
+ }
+
+ template <typename T, typename U>
+ friend void serialize (
+ const scan_fhog_pyramid<T,U>& item,
+ std::ostream& out
+ );
+
+ template <typename T, typename U>
+ friend void deserialize (
+ scan_fhog_pyramid<T,U>& item,
+ std::istream& in
+ );
+
+ private:
+ inline void compute_fhog_window_size(
+ unsigned long& width,
+ unsigned long& height
+ ) const
+ {
+ const rectangle rect = centered_rect(point(0,0),window_width,window_height);
+ const rectangle temp = grow_rect(fe.image_to_feats(rect, cell_size, 1, 1), padding);
+ width = temp.width();
+ height = temp.height();
+ }
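+ // For instance, with the defaults set by init() (a 64x64 detection window,
+ // cell_size == 8, padding == 1) this yields a 10x10 block of fHOG cells,
+ // so get_num_dimensions() == 10*10*31 == 3100 with the default extractor.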
+
+ void get_mapped_rect_and_metadata (
+ const unsigned long number_pyramid_levels,
+ const rectangle& rect,
+ rectangle& mapped_rect,
+ rectangle& fhog_rect,
+ unsigned long& best_level
+ ) const;
+
+ double get_match_score (
+ rectangle r1,
+ rectangle r2
+ ) const
+ {
+ // make the rectangles overlap as much as possible before computing the match score.
+ r1 = move_rect(r1, r2.tl_corner());
+ return (r1.intersect(r2).area())/(double)(r1 + r2).area();
+ }
+
+ typedef array<array2d<float> > fhog_image;
+
+ feature_extractor_type fe;
+ array<fhog_image> feats;
+ int cell_size;
+ unsigned long padding;
+ unsigned long window_width;
+ unsigned long window_height;
+ unsigned long max_pyramid_levels;
+ unsigned long min_pyramid_layer_width;
+ unsigned long min_pyramid_layer_height;
+ double nuclear_norm_regularization_strength;
+
+ void init()
+ {
+ cell_size = 8;
+ padding = 1;
+ window_width = 64;
+ window_height = 64;
+ max_pyramid_levels = 1000;
+ min_pyramid_layer_width = 64;
+ min_pyramid_layer_height = 64;
+ nuclear_norm_regularization_strength = 0;
+ }
+
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ template <typename fhog_filterbank>
+ rectangle apply_filters_to_fhog (
+ const fhog_filterbank& w,
+ const array<array2d<float> >& feats,
+ array2d<float>& saliency_image
+ )
+ {
+ const unsigned long num_separable_filters = w.num_separable_filters();
+ rectangle area;
+ // Use the separable filters only when they would be faster than the regular
+ // 2D filters. If there are too many separable components, the regular
+ // filters are cheaper, so this branch runs them directly.
+ if (num_separable_filters > w.filters.size()*std::min(w.filters[0].nr(),w.filters[0].nc())/3.0)
+ {
+ area = spatially_filter_image(feats[0], saliency_image, w.filters[0]);
+ for (unsigned long i = 1; i < w.filters.size(); ++i)
+ {
+ // now we filter but the output adds to saliency_image rather than
+ // overwriting it.
+ spatially_filter_image(feats[i], saliency_image, w.filters[i], 1, false, true);
+ }
+ }
+ else
+ {
+ saliency_image.clear();
+ array2d<float> scratch;
+
+ // find the first filter to apply
+ unsigned long i = 0;
+ while (i < w.row_filters.size() && w.row_filters[i].size() == 0)
+ ++i;
+
+ for (; i < w.row_filters.size(); ++i)
+ {
+ for (unsigned long j = 0; j < w.row_filters[i].size(); ++j)
+ {
+ if (saliency_image.size() == 0)
+ area = float_spatially_filter_image_separable(feats[i], saliency_image, w.row_filters[i][j], w.col_filters[i][j],scratch,false);
+ else
+ area = float_spatially_filter_image_separable(feats[i], saliency_image, w.row_filters[i][j], w.col_filters[i][j],scratch,true);
+ }
+ }
+ if (saliency_image.size() == 0)
+ {
+ saliency_image.set_size(feats[0].nr(), feats[0].nc());
+ assign_all_pixels(saliency_image, 0);
+ }
+ }
+ return area;
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void serialize (
+ const scan_fhog_pyramid<T,U>& item,
+ std::ostream& out
+ )
+ {
+ int version = 1;
+ serialize(version, out);
+ serialize(item.fe, out);
+ serialize(item.feats, out);
+ serialize(item.cell_size, out);
+ serialize(item.padding, out);
+ serialize(item.window_width, out);
+ serialize(item.window_height, out);
+ serialize(item.max_pyramid_levels, out);
+ serialize(item.min_pyramid_layer_width, out);
+ serialize(item.min_pyramid_layer_height, out);
+ serialize(item.nuclear_norm_regularization_strength, out);
+ serialize(item.get_num_dimensions(), out);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void deserialize (
+ scan_fhog_pyramid<T,U>& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unsupported version found when deserializing a scan_fhog_pyramid object.");
+
+ deserialize(item.fe, in);
+ deserialize(item.feats, in);
+ deserialize(item.cell_size, in);
+ deserialize(item.padding, in);
+ deserialize(item.window_width, in);
+ deserialize(item.window_height, in);
+ deserialize(item.max_pyramid_levels, in);
+ deserialize(item.min_pyramid_layer_width, in);
+ deserialize(item.min_pyramid_layer_height, in);
+ deserialize(item.nuclear_norm_regularization_strength, in);
+
+ // When developing some feature extractor, it's easy to accidentally change its
+ // number of dimensions and then try to deserialize data from an older version of
+ // your extractor into the current code. This check is here to catch that kind of
+ // user error.
+ long dims;
+ deserialize(dims, in);
+ if (item.get_num_dimensions() != dims)
+ throw serialization_error("Number of dimensions in serialized scan_fhog_pyramid doesn't match the expected number.");
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// scan_fhog_pyramid member functions
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ scan_fhog_pyramid (
+ )
+ {
+ init();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ scan_fhog_pyramid (
+ const feature_extractor_type& fe_
+ )
+ {
+ init();
+ fe = fe_;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ template <
+ typename pyramid_type,
+ typename image_type,
+ typename feature_extractor_type
+ >
+ void create_fhog_pyramid (
+ const image_type& img,
+ const feature_extractor_type& fe,
+ array<array<array2d<float> > >& feats,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding,
+ unsigned long min_pyramid_layer_width,
+ unsigned long min_pyramid_layer_height,
+ unsigned long max_pyramid_levels
+ )
+ {
+ unsigned long levels = 0;
+ rectangle rect = get_rect(img);
+
+ // figure out how many pyramid levels we should be using based on the image size
+ pyramid_type pyr;
+ do
+ {
+ rect = pyr.rect_down(rect);
+ ++levels;
+ } while (rect.width() >= min_pyramid_layer_width && rect.height() >= min_pyramid_layer_height &&
+ levels < max_pyramid_levels);
+
+ if (feats.max_size() < levels)
+ feats.set_max_size(levels);
+ feats.set_size(levels);
+
+
+
+ // build our feature pyramid
+ fe(img, feats[0], cell_size,filter_rows_padding,filter_cols_padding);
+ DLIB_ASSERT(feats[0].size() == fe.get_num_planes(),
+ "Invalid feature extractor used with dlib::scan_fhog_pyramid. The output does not have the \n"
+ "indicated number of planes.");
+
+ if (feats.size() > 1)
+ {
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+ array2d<pixel_type> temp1, temp2;
+ pyr(img, temp1);
+ fe(temp1, feats[1], cell_size,filter_rows_padding,filter_cols_padding);
+ swap(temp1,temp2);
+
+ for (unsigned long i = 2; i < feats.size(); ++i)
+ {
+ pyr(temp2, temp1);
+ fe(temp1, feats[i], cell_size,filter_rows_padding,filter_cols_padding);
+ swap(temp1,temp2);
+ }
+ }
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ template <
+ typename image_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ load (
+ const image_type& img
+ )
+ {
+ unsigned long width, height;
+ compute_fhog_window_size(width,height);
+ impl::create_fhog_pyramid<Pyramid_type>(img, fe, feats, cell_size, height,
+ width, min_pyramid_layer_width, min_pyramid_layer_height,
+ max_pyramid_levels);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ bool scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ is_loaded_with_image (
+ ) const
+ {
+ return feats.size() != 0;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ copy_configuration (
+ const scan_fhog_pyramid& item
+ )
+ {
+ cell_size = item.cell_size;
+ padding = item.padding;
+ window_width = item.window_width;
+ window_height = item.window_height;
+ max_pyramid_levels = item.max_pyramid_levels;
+ min_pyramid_layer_width = item.min_pyramid_layer_width;
+ min_pyramid_layer_height = item.min_pyramid_layer_height;
+ nuclear_norm_regularization_strength = item.nuclear_norm_regularization_strength;
+ fe = item.fe;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_num_detection_templates (
+ ) const
+ {
+ return 1;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_num_movable_components_per_detection_template (
+ ) const
+ {
+ return 0;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_num_dimensions (
+ ) const
+ {
+ unsigned long width, height;
+ compute_fhog_window_size(width,height);
+ return width*height*fe.get_num_planes();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_max_pyramid_levels (
+ ) const
+ {
+ return max_pyramid_levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ set_max_pyramid_levels (
+ unsigned long max_levels
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(max_levels > 0 ,
+ "\t void scan_fhog_pyramid::set_max_pyramid_levels()"
+ << "\n\t You can't have zero levels. "
+ << "\n\t max_levels: " << max_levels
+ << "\n\t this: " << this
+ );
+
+ max_pyramid_levels = max_levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ inline bool compare_pair_rect (
+ const std::pair<double, rectangle>& a,
+ const std::pair<double, rectangle>& b
+ )
+ {
+ return a.first < b.first;
+ }
+
+ template <
+ typename pyramid_type,
+ typename feature_extractor_type,
+ typename fhog_filterbank
+ >
+ void detect_from_fhog_pyramid (
+ const array<array<array2d<float> > >& feats,
+ const feature_extractor_type& fe,
+ const fhog_filterbank& w,
+ const double thresh,
+ const unsigned long det_box_height,
+ const unsigned long det_box_width,
+ const int cell_size,
+ const int filter_rows_padding,
+ const int filter_cols_padding,
+ std::vector<std::pair<double, rectangle> >& dets
+ )
+ {
+ dets.clear();
+
+ array2d<float> saliency_image;
+ pyramid_type pyr;
+
+ // for all pyramid levels
+ for (unsigned long l = 0; l < feats.size(); ++l)
+ {
+ const rectangle area = apply_filters_to_fhog(w, feats[l], saliency_image);
+
+ // now search the saliency image for any detections
+ for (long r = area.top(); r <= area.bottom(); ++r)
+ {
+ for (long c = area.left(); c <= area.right(); ++c)
+ {
+ // if we found a detection
+ if (saliency_image[r][c] >= thresh)
+ {
+ rectangle rect = fe.feats_to_image(centered_rect(point(c,r),det_box_width,det_box_height),
+ cell_size, filter_rows_padding, filter_cols_padding);
+ rect = pyr.rect_up(rect, l);
+ dets.push_back(std::make_pair(saliency_image[r][c], rect));
+ }
+ }
+ }
+ }
+
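+ // compare_pair_rect sorts ascending by confidence, so sorting over the
+ // reverse iterators leaves dets ordered best detection first.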
+ std::sort(dets.rbegin(), dets.rend(), compare_pair_rect);
+ }
+
+ inline bool overlaps_any_box (
+ const test_box_overlap& tester,
+ const std::vector<rect_detection>& rects,
+ const rect_detection& rect
+ )
+ {
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ // Only compare detections from the same detector. That is, we don't want
+ // the output of one detector to suppress the output of another detector.
+ if (rects[i].weight_index == rect.weight_index && tester(rects[i].rect, rect.rect))
+ return true;
+ }
+ return false;
+ }
+
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ detect (
+ const fhog_filterbank& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ w.get_num_dimensions() == get_num_dimensions(),
+ "\t void scan_fhog_pyramid::detect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t w.get_num_dimensions(): " << w.get_num_dimensions()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ unsigned long width, height;
+ compute_fhog_window_size(width,height);
+
+ impl::detect_from_fhog_pyramid<pyramid_type>(feats, fe, w, thresh,
+ height-2*padding, width-2*padding, cell_size, height, width, dets);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ const rectangle scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_best_matching_rect (
+ const rectangle& rect
+ ) const
+ {
+ rectangle mapped_rect, fhog_rect;
+ unsigned long best_level;
+ get_mapped_rect_and_metadata(max_pyramid_levels, rect, mapped_rect, fhog_rect, best_level);
+ return mapped_rect;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_mapped_rect_and_metadata (
+ const unsigned long number_pyramid_levels,
+ const rectangle& rect,
+ rectangle& mapped_rect,
+ rectangle& fhog_rect,
+ unsigned long& best_level
+ ) const
+ {
+ pyramid_type pyr;
+ best_level = 0;
+ double best_match_score = -1;
+
+
+ unsigned long width, height;
+ compute_fhog_window_size(width,height);
+
+ // Figure out the pyramid level which best matches rect against our detection
+ // window.
+ for (unsigned long l = 0; l < number_pyramid_levels; ++l)
+ {
+ const rectangle rect_fhog_space = fe.image_to_feats(pyr.rect_down(rect,l), cell_size, height,width);
+
+ const rectangle win_image_space = pyr.rect_up(fe.feats_to_image(centered_rect(center(rect_fhog_space),width-2*padding,height-2*padding), cell_size, height,width), l);
+
+ const double match_score = get_match_score(win_image_space, rect);
+ if (match_score > best_match_score)
+ {
+ best_match_score = match_score;
+ best_level = l;
+ fhog_rect = centered_rect(center(rect_fhog_space), width, height);
+ }
+
+ if (rect_fhog_space.area() <= 1)
+ break;
+ }
+ mapped_rect = pyr.rect_up(fe.feats_to_image(shrink_rect(fhog_rect,padding), cell_size,height,width),best_level);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ full_object_detection scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type&
+ ) const
+ {
+ return full_object_detection(rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ psi.size() >= get_num_dimensions() &&
+ obj.num_parts() == 0,
+ "\t void scan_fhog_pyramid::get_feature_vector()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t psi.size(): " << psi.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t obj.num_parts(): " << obj.num_parts()
+ << "\n\t this: " << this
+ );
+
+
+
+ rectangle mapped_rect;
+ unsigned long best_level;
+ rectangle fhog_rect;
+ get_mapped_rect_and_metadata(feats.size(), obj.get_rect(), mapped_rect, fhog_rect, best_level);
+
+
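+            // Accumulate the feature values covered by fhog_rect, clipped to the bounds
+            // of the feature planes, into psi.  Positions of fhog_rect falling outside a
+            // feature plane contribute nothing but still advance the index i, so psi
+            // always has the fixed layout implied by get_num_dimensions().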
+ long i = 0;
+ for (unsigned long ii = 0; ii < feats[best_level].size(); ++ii)
+ {
+ const rectangle rect = get_rect(feats[best_level][0]);
+ for (long r = fhog_rect.top(); r <= fhog_rect.bottom(); ++r)
+ {
+ for (long c = fhog_rect.left(); c <= fhog_rect.right(); ++c)
+ {
+ if (rect.contains(c,r))
+ psi(i) += feats[best_level][ii][r][c];
+ ++i;
+ }
+ }
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ void scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(width > 0 && height > 0 ,
+ "\t void scan_fhog_pyramid::set_min_pyramid_layer_size()"
+ << "\n\t These sizes can't be zero. "
+ << "\n\t width: " << width
+ << "\n\t height: " << height
+ << "\n\t this: " << this
+ );
+
+ min_pyramid_layer_width = width;
+ min_pyramid_layer_height = height;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_min_pyramid_layer_width (
+ ) const
+ {
+ return min_pyramid_layer_width;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::
+ get_min_pyramid_layer_height (
+ ) const
+ {
+ return min_pyramid_layer_height;
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ matrix<unsigned char> draw_fhog (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ const unsigned long weight_index = 0,
+ const long cell_draw_size = 15
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(weight_index < detector.num_detectors(),
+ "\t matrix draw_fhog()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t weight_index: " << weight_index
+ << "\n\t detector.num_detectors(): " << detector.num_detectors()
+ );
+ DLIB_ASSERT(cell_draw_size > 0 && detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions(),
+ "\t matrix draw_fhog()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t cell_draw_size: " << cell_draw_size
+ << "\n\t weight_index: " << weight_index
+ << "\n\t detector.get_w(weight_index).size(): " << detector.get_w(weight_index).size()
+ << "\n\t detector.get_scanner().get_num_dimensions(): " << detector.get_scanner().get_num_dimensions()
+ );
+
+ typename scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::fhog_filterbank fb = detector.get_scanner().build_fhog_filterbank(detector.get_w(weight_index));
+ return draw_fhog(fb.get_filters(),cell_draw_size);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long num_separable_filters (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ const unsigned long weight_index = 0
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(weight_index < detector.num_detectors(),
+ "\t unsigned long num_separable_filters()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t weight_index: " << weight_index
+ << "\n\t detector.num_detectors(): " << detector.num_detectors()
+ );
+ DLIB_ASSERT(detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions() ,
+ "\t unsigned long num_separable_filters()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t detector.get_w(weight_index).size(): " << detector.get_w(weight_index).size()
+ << "\n\t detector.get_scanner().get_num_dimensions(): " << detector.get_scanner().get_num_dimensions()
+ );
+
+ typename scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::fhog_filterbank fb = detector.get_scanner().build_fhog_filterbank(detector.get_w(weight_index));
+ return fb.num_separable_filters();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> > threshold_filter_singular_values (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ double thresh,
+ const unsigned long weight_index = 0
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(thresh >= 0 ,
+ "\t object_detector threshold_filter_singular_values()"
+ << "\n\t Invalid inputs were given to this function."
+ << "\n\t thresh: " << thresh
+ );
+
+ DLIB_ASSERT(weight_index < detector.num_detectors(),
+ "\t object_detector threshold_filter_singular_values()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t weight_index: " << weight_index
+ << "\n\t detector.num_detectors(): " << detector.num_detectors()
+ );
+ DLIB_ASSERT(detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions() ,
+ "\t object_detector threshold_filter_singular_values()"
+ << "\n\t Invalid arguments were given to this function. "
+ << "\n\t detector.get_w(weight_index).size(): " << detector.get_w(weight_index).size()
+ << "\n\t detector.get_scanner().get_num_dimensions(): " << detector.get_scanner().get_num_dimensions()
+ );
+
+
+ const unsigned long width = detector.get_scanner().get_fhog_window_width();
+ const unsigned long height = detector.get_scanner().get_fhog_window_height();
+ const long num_planes = detector.get_scanner().get_feature_extractor().get_num_planes();
+ const long size = width*height;
+
+ std::vector<matrix<double,0,1> > detector_weights;
+ for (unsigned long j = 0; j < detector.num_detectors(); ++j)
+ {
+ matrix<double,0,1> weights = detector.get_w(j);
+
+ if (j == weight_index)
+ {
+ matrix<double> u,v,w,f;
+ for (long i = 0; i < num_planes; ++i)
+ {
+ f = reshape(rowm(weights, range(i*size, (i+1)*size-1)), height, width);
+
+ svd3(f, u,w,v);
+ const double scaled_thresh = std::max(1e-3, max(w)*thresh);
+ w = round_zeros(w, scaled_thresh);
+ f = u*diagm(w)*trans(v);
+
+ set_rowm(weights,range(i*size, (i+1)*size-1)) = reshape_to_column_vector(f);
+ }
+ }
+ detector_weights.push_back(weights);
+ }
+
+ return object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >(detector.get_scanner(),
+ detector.get_overlap_tester(),
+ detector_weights);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type,
+ typename svm_struct_prob_type
+ >
+ void configure_nuclear_norm_regularizer (
+ const scan_fhog_pyramid<Pyramid_type,feature_extractor_type>& scanner,
+ svm_struct_prob_type& prob
+ )
+ {
+ const double strength = scanner.get_nuclear_norm_regularization_strength();
+ const long num_planes = scanner.get_feature_extractor().get_num_planes();
+ if (strength != 0)
+ {
+ const unsigned long width = scanner.get_fhog_window_width();
+ const unsigned long height = scanner.get_fhog_window_height();
+ for (long i = 0; i < num_planes; ++i)
+ {
+ prob.add_nuclear_norm_regularizer(i*width*height, height, width, strength);
+ }
+ prob.set_cache_based_epsilon(0.001);
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ struct processed_weight_vector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >
+ {
+ processed_weight_vector(){}
+
+ typedef matrix<double,0,1> feature_vector_type;
+ typedef typename scan_fhog_pyramid<Pyramid_type,feature_extractor_type>::fhog_filterbank fhog_filterbank;
+
+ void init (
+ const scan_fhog_pyramid<Pyramid_type,feature_extractor_type>& scanner
+ )
+ {
+ fb = scanner.build_fhog_filterbank(w);
+ }
+
+ const fhog_filterbank& get_detect_argument() const { return fb; }
+
+ feature_vector_type w;
+ fhog_filterbank fb;
+
+ };
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename pyramid_type,
+ typename image_type
+ >
+ void evaluate_detectors (
+ const std::vector<object_detector<scan_fhog_pyramid<pyramid_type> > >& detectors,
+ const image_type& img,
+ std::vector<rect_detection>& dets,
+ const double adjust_threshold = 0
+ )
+ {
+ typedef scan_fhog_pyramid<pyramid_type> scanner_type;
+
+ dets.clear();
+ if (detectors.size() == 0)
+ return;
+
+ const unsigned long cell_size = detectors[0].get_scanner().get_cell_size();
+
+        // Find the largest filter sizes and also the most extreme pyramid settings used.
+ unsigned long max_filter_width = 0;
+ unsigned long max_filter_height = 0;
+ unsigned long min_pyramid_layer_width = std::numeric_limits<unsigned long>::max();
+ unsigned long min_pyramid_layer_height = std::numeric_limits<unsigned long>::max();
+ unsigned long max_pyramid_levels = 0;
+ bool all_cell_sizes_the_same = true;
+ for (unsigned long i = 0; i < detectors.size(); ++i)
+ {
+ const scanner_type& scanner = detectors[i].get_scanner();
+ max_filter_width = std::max(max_filter_width, scanner.get_fhog_window_width());
+ max_filter_height = std::max(max_filter_height, scanner.get_fhog_window_height());
+ max_pyramid_levels = std::max(max_pyramid_levels, scanner.get_max_pyramid_levels());
+ min_pyramid_layer_width = std::min(min_pyramid_layer_width, scanner.get_min_pyramid_layer_width());
+ min_pyramid_layer_height = std::min(min_pyramid_layer_height, scanner.get_min_pyramid_layer_height());
+ if (cell_size != scanner.get_cell_size())
+ all_cell_sizes_the_same = false;
+ }
+
+ std::vector<rect_detection> dets_accum;
+        // Do the HOG feature extraction to make the fhog pyramid.  Note that we make a
+        // single pyramid that will work with any of the detectors, but only if all the
+        // cell sizes are the same.  If they aren't then we have to compute the pyramid
+        // for each detector individually in the loop below.
+ array<array<array2d<float> > > feats;
+ if (all_cell_sizes_the_same)
+ {
+ impl::create_fhog_pyramid<pyramid_type>(img,
+ detectors[0].get_scanner().get_feature_extractor(), feats, cell_size,
+ max_filter_height, max_filter_width, min_pyramid_layer_width,
+ min_pyramid_layer_height, max_pyramid_levels);
+ }
+
+ std::vector<std::pair<double, rectangle> > temp_dets;
+ for (unsigned long i = 0; i < detectors.size(); ++i)
+ {
+ const scanner_type& scanner = detectors[i].get_scanner();
+ if (!all_cell_sizes_the_same)
+ {
+ impl::create_fhog_pyramid<pyramid_type>(img,
+ scanner.get_feature_extractor(), feats, scanner.get_cell_size(),
+ max_filter_height, max_filter_width, min_pyramid_layer_width,
+ min_pyramid_layer_height, max_pyramid_levels);
+ }
+
+ const unsigned long det_box_width = scanner.get_fhog_window_width() - 2*scanner.get_padding();
+ const unsigned long det_box_height = scanner.get_fhog_window_height() - 2*scanner.get_padding();
+ // A single detector object might itself have multiple weight vectors in it. So
+ // we need to evaluate all of them.
+            for (unsigned long d = 0; d < detectors[i].num_detectors(); ++d)
+ {
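+                // The learned detection threshold is stored at the end of the weight
+                // vector, just past the filter weights.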
+ const double thresh = detectors[i].get_processed_w(d).w(scanner.get_num_dimensions());
+
+ impl::detect_from_fhog_pyramid<pyramid_type>(feats, scanner.get_feature_extractor(),
+ detectors[i].get_processed_w(d).get_detect_argument(), thresh+adjust_threshold,
+ det_box_height, det_box_width, cell_size, max_filter_height,
+ max_filter_width, temp_dets);
+
+ for (unsigned long j = 0; j < temp_dets.size(); ++j)
+ {
+ rect_detection temp;
+ temp.detection_confidence = temp_dets[j].first-thresh;
+ temp.weight_index = i;
+ temp.rect = temp_dets[j].second;
+ dets_accum.push_back(temp);
+ }
+ }
+ }
+
+
+ // Do non-max suppression
+ if (detectors.size() > 1)
+ std::sort(dets_accum.rbegin(), dets_accum.rend());
+ for (unsigned long i = 0; i < dets_accum.size(); ++i)
+ {
+ const test_box_overlap tester = detectors[dets_accum[i].weight_index].get_overlap_tester();
+ if (impl::overlaps_any_box(tester, dets, dets_accum[i]))
+ continue;
+
+ dets.push_back(dets_accum[i]);
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename image_type
+ >
+ std::vector<rectangle> evaluate_detectors (
+ const std::vector<object_detector<scan_fhog_pyramid<Pyramid_type> > >& detectors,
+ const image_type& img,
+ const double adjust_threshold = 0
+ )
+ {
+ std::vector<rectangle> out_dets;
+ std::vector<rect_detection> dets;
+ evaluate_detectors(detectors, img, dets, adjust_threshold);
+ out_dets.reserve(dets.size());
+ for (unsigned long i = 0; i < dets.size(); ++i)
+ out_dets.push_back(dets[i].rect);
+ return out_dets;
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_fHOG_PYRAMID_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/scan_fhog_pyramid_abstract.h b/ml/dlib/dlib/image_processing/scan_fhog_pyramid_abstract.h
new file mode 100644
index 000000000..d12a2b2b8
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_fhog_pyramid_abstract.h
@@ -0,0 +1,784 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_fHOG_PYRAMID_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_fHOG_PYRAMID_ABSTRACT_Hh_
+
+#include <vector>
+#include "../image_transforms/fhog_abstract.h"
+#include "object_detector_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ matrix<unsigned char> draw_fhog (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ const unsigned long weight_index = 0,
+ const long cell_draw_size = 15
+ );
+ /*!
+ requires
+ - cell_draw_size > 0
+ - weight_index < detector.num_detectors()
+ - detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions()
+ (i.e. the detector must have been populated with a HOG filter)
+ ensures
+ - Converts the HOG filters in the given detector (specifically, the filters in
+ detector.get_w(weight_index)) into an image suitable for display on the
+ screen. In particular, we draw all the HOG cells into a grayscale image in a
+ way that shows the magnitude and orientation of the gradient energy in each
+ cell. The resulting image is then returned.
+ !*/
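+
+    // A minimal usage sketch (assuming a detector already trained with the
+    // standard pyramid_down<6> pyramid and saved to a hypothetical file
+    // "detector.svm"):
+    //
+    //     typedef scan_fhog_pyramid<pyramid_down<6> > image_scanner_type;
+    //     object_detector<image_scanner_type> detector;
+    //     std::ifstream fin("detector.svm", std::ios::binary);
+    //     deserialize(detector, fin);
+    //     image_window win(draw_fhog(detector));  // view the learned HOG filters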
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ unsigned long num_separable_filters (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ const unsigned long weight_index = 0
+ );
+ /*!
+ requires
+ - weight_index < detector.num_detectors()
+ - detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions()
+ (i.e. the detector must have been populated with a HOG filter)
+ ensures
+ - Returns the number of separable filters necessary to represent the HOG
+ filters in the given detector's weight_index'th filter. This is the filter
+ defined by detector.get_w(weight_index).
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename feature_extractor_type
+ >
+ object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> > threshold_filter_singular_values (
+ const object_detector<scan_fhog_pyramid<Pyramid_type,feature_extractor_type> >& detector,
+ double thresh,
+ const unsigned long weight_index = 0
+ );
+ /*!
+ requires
+ - thresh >= 0
+ - weight_index < detector.num_detectors()
+ - detector.get_w(weight_index).size() >= detector.get_scanner().get_num_dimensions()
+ (i.e. the detector must have been populated with a HOG filter)
+ ensures
+ - Removes all components of the filters in the given detector that have
+ singular values that are smaller than the given threshold. Therefore, this
+ function allows you to control how many separable filters are in a detector.
+ In particular, as thresh gets larger the quantity
+ num_separable_filters(threshold_filter_singular_values(detector,thresh,weight_index),weight_index)
+ will generally get smaller and therefore give a faster running detector.
+ However, note that at some point a large enough thresh will drop too much
+ information from the filters and their accuracy will suffer.
+ - returns the updated detector
+ !*/
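+
+    // A sketch of how this interacts with num_separable_filters() (detector is
+    // assumed to be an already trained object_detector<scan_fhog_pyramid<...> >):
+    //
+    //     cout << "separable filters before: " << num_separable_filters(detector) << endl;
+    //     detector = threshold_filter_singular_values(detector, 0.1);
+    //     cout << "separable filters after:  " << num_separable_filters(detector) << endl;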
+
+// ----------------------------------------------------------------------------------------
+
+ class default_fhog_feature_extractor
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ The scan_fhog_pyramid object defined below is primarily meant to be used
+ with the feature extraction technique implemented by extract_fhog_features().
+ This technique can generally be understood as taking an input image and
+ outputting a multi-planed output image of floating point numbers that
+ somehow describe the image contents. Since there are many ways to define
+ how this feature mapping is performed, the scan_fhog_pyramid allows you to
+ replace the extract_fhog_features() method with a customized method of your
+ choosing. To do this you implement a class with the same interface as
+ default_fhog_feature_extractor.
+
+            Therefore, the point of default_fhog_feature_extractor is twofold.  First,
+ it provides the default FHOG feature extraction method used by scan_fhog_pyramid.
+ Second, it serves to document the interface you need to implement to define
+ your own custom HOG style feature extraction.
+ !*/
+
+ public:
+
+ rectangle image_to_feats (
+ const rectangle& rect,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const { return image_to_fhog(rect, cell_size, filter_rows_padding, filter_cols_padding); }
+ /*!
+ requires
+ - cell_size > 0
+ - filter_rows_padding > 0
+ - filter_cols_padding > 0
+ ensures
+ - Maps a rectangle from the coordinates in an input image to the corresponding
+ area in the output feature image.
+ !*/
+
+ rectangle feats_to_image (
+ const rectangle& rect,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const { return fhog_to_image(rect, cell_size, filter_rows_padding, filter_cols_padding); }
+ /*!
+ requires
+ - cell_size > 0
+ - filter_rows_padding > 0
+ - filter_cols_padding > 0
+ ensures
+ - Maps a rectangle from the coordinates of the hog feature image back to
+ the input image.
+ - Mapping from feature space to image space is an invertible
+ transformation. That is, for any rectangle R we have:
+ R == image_to_feats(feats_to_image(R,cell_size,filter_rows_padding,filter_cols_padding),
+ cell_size,filter_rows_padding,filter_cols_padding).
+ !*/
+
+ template <
+ typename image_type
+ >
+ void operator()(
+ const image_type& img,
+ dlib::array<array2d<float> >& hog,
+ int cell_size,
+ int filter_rows_padding,
+ int filter_cols_padding
+ ) const { extract_fhog_features(img,hog,cell_size,filter_rows_padding,filter_cols_padding); }
+ /*!
+ requires
+                - image_type == an implementation of array2d/array2d_kernel_abstract.h
+ - img contains some kind of pixel type.
+ (i.e. pixel_traits<typename image_type::type> is defined)
+ ensures
+ - Extracts FHOG features by calling extract_fhog_features(). The results are
+ stored into #hog. Note that if you are implementing your own feature extractor you can
+ pretty much do whatever you want in terms of feature extraction so long as the following
+ conditions are met:
+ - #hog.size() == get_num_planes()
+ - Each image plane in #hog has the same dimensions.
+ - for all valid i, r, and c:
+ - #hog[i][r][c] == a feature value describing the image content centered at the
+ following pixel location in img:
+ feats_to_image(point(c,r),cell_size,filter_rows_padding,filter_cols_padding)
+ !*/
+
+ inline unsigned long get_num_planes (
+ ) const { return 31; }
+ /*!
+ ensures
+ - returns the number of planes in the hog image output by the operator()
+ method.
+ !*/
+ };
+
+ inline void serialize (const default_fhog_feature_extractor&, std::ostream&) {}
+ inline void deserialize (default_fhog_feature_extractor&, std::istream&) {}
+ /*!
+ Provides serialization support. Note that there is no state in the default hog
+ feature extractor so these functions do nothing. But if you define a custom
+ feature extractor then make sure you remember to serialize any state in your
+ feature extractor.
+ !*/
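+
+    // For reference, a skeleton of a custom feature extractor, as a sketch (the
+    // name my_feature_extractor is hypothetical; any class with this interface,
+    // plus matching serialize()/deserialize() overloads, can be used):
+    //
+    //     class my_feature_extractor
+    //     {
+    //     public:
+    //         rectangle image_to_feats(const rectangle& rect, int cell_size,
+    //                                  int filter_rows_padding, int filter_cols_padding) const;
+    //         rectangle feats_to_image(const rectangle& rect, int cell_size,
+    //                                  int filter_rows_padding, int filter_cols_padding) const;
+    //         template <typename image_type>
+    //         void operator()(const image_type& img, dlib::array<array2d<float> >& feats,
+    //                         int cell_size, int filter_rows_padding, int filter_cols_padding) const;
+    //         unsigned long get_num_planes() const;
+    //     };
+    //
+    //     // used as: scan_fhog_pyramid<pyramid_down<6>, my_feature_extractor>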
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type = default_fhog_feature_extractor
+ >
+ class scan_fhog_pyramid : noncopyable
+ {
+ /*!
+ REQUIREMENTS ON Pyramid_type
+ - Must be one of the pyramid_down objects defined in
+ dlib/image_transforms/image_pyramid_abstract.h or an object with a
+ compatible interface
+
+ REQUIREMENTS ON Feature_extractor_type
+ - Must be a type with an interface compatible with the
+ default_fhog_feature_extractor.
+
+ INITIAL VALUE
+ - get_padding() == 1
+ - get_cell_size() == 8
+ - get_detection_window_width() == 64
+ - get_detection_window_height() == 64
+ - get_max_pyramid_levels() == 1000
+ - get_min_pyramid_layer_width() == 64
+ - get_min_pyramid_layer_height() == 64
+ - get_nuclear_norm_regularization_strength() == 0
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for running a fixed sized sliding window classifier
+ over an image pyramid. In particular, it slides a linear classifier over
+ a HOG pyramid as discussed in the paper:
+ Histograms of Oriented Gradients for Human Detection by Navneet Dalal
+ and Bill Triggs, CVPR 2005
+ However, we augment the method slightly to use the version of HOG features
+ from:
+ Object Detection with Discriminatively Trained Part Based Models by
+ P. Felzenszwalb, R. Girshick, D. McAllester, D. Ramanan
+ IEEE Transactions on Pattern Analysis and Machine Intelligence, Vol. 32, No. 9, Sep. 2010
+                since these HOG features have been shown to give superior performance.
+
+ THREAD SAFETY
+ Concurrent access to an instance of this object is not safe and should be
+ protected by a mutex lock except for the case where you are copying the
+ configuration (via copy_configuration()) of a scan_fhog_pyramid object to
+ many other threads. In this case, it is safe to copy the configuration of
+ a shared object so long as no other operations are performed on it.
+ !*/
+
+ public:
+ typedef matrix<double,0,1> feature_vector_type;
+ typedef Pyramid_type pyramid_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_fhog_pyramid (
+ );
+ /*!
+ ensures
+ - this object is properly initialized
+ !*/
+
+ explicit scan_fhog_pyramid (
+ const feature_extractor_type& fe
+ );
+ /*!
+ ensures
+ - this object is properly initialized
+ - #get_feature_extractor() == fe
+ !*/
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+ /*!
+ requires
+                - image_type == an implementation of array2d/array2d_kernel_abstract.h
+ - img contains some kind of pixel type.
+ (i.e. pixel_traits<typename image_type::type> is defined)
+ ensures
+ - #is_loaded_with_image() == true
+ - This object is ready to run a classifier over img to detect object
+ locations. Call detect() to do this.
+ !*/
+
+ const feature_extractor_type& get_feature_extractor(
+ ) const;
+ /*!
+ ensures
+ - returns a const reference to the feature extractor used by this object.
+ !*/
+
+ bool is_loaded_with_image (
+ ) const;
+ /*!
+ ensures
+ - returns true if this object has been loaded with an image to process and
+ false otherwise.
+ !*/
+
+ void copy_configuration (
+ const scan_fhog_pyramid& item
+ );
+ /*!
+ ensures
+ - Copies all the state information of item into *this, except for state
+ information populated by load(). More precisely, given two scan_fhog_pyramid
+ objects S1 and S2, the following sequence of instructions should always
+ result in both of them having the exact same state:
+ S2.copy_configuration(S1);
+ S1.load(img);
+ S2.load(img);
+ !*/
+
+ void set_detection_window_size (
+ unsigned long window_width,
+ unsigned long window_height
+ );
+ /*!
+ requires
+ - window_width > 0
+ - window_height > 0
+ ensures
+ - When detect() is called, this object scans a window that is of the given
+ width and height (in pixels) over each layer in an image pyramid. This
+ means that the rectangle detections which come out of detect() will have
+ a width to height ratio approximately equal to window_width/window_height
+ and will be approximately window_width*window_height pixels in area or
+ larger. Therefore, the smallest object that can be detected is roughly
+ window_width by window_height pixels in size.
+ - #get_detection_window_width() == window_width
+ - #get_detection_window_height() == window_height
+ - Since we use a HOG feature representation, the detection procedure works
+ as follows:
+ Step 1. Make an image pyramid.
+ Step 2. Convert each layer of the image pyramid into a multi-planed HOG "image".
+ (the number of bands is given by get_feature_extractor().get_num_planes())
+ Step 3. Scan a linear classifier over each HOG image in the pyramid.
+ Moreover, the HOG features quantize the input image into a grid of cells,
+ each cell being get_cell_size() by get_cell_size() pixels in size. So
+ when we scan the object detector over the pyramid we are scanning an
+ appropriately sized window over these smaller quantized HOG features. In
+ particular, the size of the window we scan over the HOG feature pyramid
+ is #get_fhog_window_width() by #get_fhog_window_height() HOG cells in
+ size.
+ - #is_loaded_with_image() == false
+ !*/
+
+ unsigned long get_detection_window_width (
+ ) const;
+ /*!
+ ensures
+ - returns the width, in pixels, of the detection window that is scanned
+ over the image when detect() is called.
+ !*/
+
+ inline unsigned long get_detection_window_height (
+ ) const;
+ /*!
+ ensures
+ - returns the height, in pixels, of the detection window that is scanned
+ over the image when detect() is called.
+ !*/
+
+ unsigned long get_fhog_window_width (
+ ) const;
+ /*!
+ ensures
+ - Returns the width of the HOG scanning window in terms of HOG cell blocks.
+ Note that this is a function of get_detection_window_width(), get_cell_size(),
+ and get_padding() and is therefore not something you set directly.
+ - #get_fhog_window_width() is approximately equal to the number of HOG cells
+ that fit into get_detection_window_width() pixels plus 2*get_padding()
+ since we include additional padding around each window to add context.
+ !*/
+
+ unsigned long get_fhog_window_height (
+ ) const;
+ /*!
+ ensures
+ - Returns the height of the HOG scanning window in terms of HOG cell blocks.
+ Note that this is a function of get_detection_window_height(), get_cell_size(),
+ and get_padding() and is therefore not something you set directly.
+ - #get_fhog_window_height() is approximately equal to the number of HOG cells
+ that fit into get_detection_window_height() pixels plus 2*get_padding()
+ since we include additional padding around each window to add context.
+ !*/
+
+ void set_padding (
+ unsigned long new_padding
+ );
+ /*!
+ ensures
+ - #get_padding() == new_padding
+ - #is_loaded_with_image() == false
+ !*/
+
+ unsigned long get_padding (
+ ) const;
+ /*!
+ ensures
+ - The HOG windows scanned over the HOG pyramid can include additional HOG
+ cells outside the detection window. This can help add context and
+ improve detection accuracy. This function returns the number of extra
+ HOG cells added onto the border of the HOG windows which are scanned by
+ detect().
+ !*/
+
+ unsigned long get_cell_size (
+ ) const;
+ /*!
+ ensures
+ - Returns the size of the HOG cells. Each HOG cell is square and contains
+ get_cell_size()*get_cell_size() pixels.
+ !*/
+
+ void set_cell_size (
+ unsigned long new_cell_size
+ );
+ /*!
+ requires
+ - new_cell_size > 0
+ ensures
+ - #get_cell_size() == new_cell_size
+ - #is_loaded_with_image() == false
+ !*/
+
+ inline long get_num_dimensions (
+ ) const;
+ /*!
+ ensures
+ - returns get_fhog_window_width()*get_fhog_window_height()*get_feature_extractor().get_num_planes()
+ (i.e. The number of features is equal to the size of the HOG window times
+ the number of planes output by the feature extractor. )
+ !*/
+
+ inline unsigned long get_num_detection_templates (
+ ) const { return 1; }
+ /*!
+ ensures
+ - returns 1. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Notionally, its return value indicates
+ that a scan_fhog_pyramid object is always ready to detect objects once
+ an image has been loaded.
+ !*/
+
+ inline unsigned long get_num_movable_components_per_detection_template (
+ ) const { return 0; }
+ /*!
+ ensures
+ - returns 0. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Its return value means that this object
+ does not support using movable part models.
+ !*/
+
+ unsigned long get_max_pyramid_levels (
+ ) const;
+ /*!
+ ensures
+ - returns the maximum number of image pyramid levels this object will use.
+ Note that #get_max_pyramid_levels() == 1 indicates that no image pyramid
+ will be used at all. That is, only the original image will be processed
+ and no lower scale versions will be created.
+ !*/
+
+ void set_max_pyramid_levels (
+ unsigned long max_levels
+ );
+ /*!
+ requires
+ - max_levels > 0
+ ensures
+ - #get_max_pyramid_levels() == max_levels
+ !*/
+
+ void set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ );
+ /*!
+ requires
+ - width > 0
+ - height > 0
+ ensures
+ - #get_min_pyramid_layer_width() == width
+ - #get_min_pyramid_layer_height() == height
+ !*/
+
+ inline unsigned long get_min_pyramid_layer_width (
+ ) const;
+ /*!
+ ensures
+ - returns the smallest allowable width of an image in the image pyramid.
+ All pyramids will always include the original input image, however, no
+ pyramid levels will be created which have a width smaller than the
+ value returned by this function.
+ !*/
+
+ inline unsigned long get_min_pyramid_layer_height (
+ ) const;
+ /*!
+ ensures
+ - returns the smallest allowable height of an image in the image pyramid.
+ All pyramids will always include the original input image, however, no
+ pyramid levels will be created which have a height smaller than the
+ value returned by this function.
+ !*/
+
+ fhog_filterbank build_fhog_filterbank (
+ const feature_vector_type& weights
+ ) const;
+ /*!
+ requires
+ - weights.size() >= get_num_dimensions()
+ ensures
+ - Creates and then returns a fhog_filterbank object FB such that:
+ - FB.get_num_dimensions() == get_num_dimensions()
+ - FB.get_filters() == the values in weights unpacked into get_feature_extractor().get_num_planes() filters.
+ - FB.num_separable_filters() == the number of separable filters necessary to
+ represent all the filters in FB.get_filters().
+ !*/
+
+ class fhog_filterbank
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object represents a HOG filter bank. That is, the classifier that is
+ slid over a HOG pyramid is a set of get_feature_extractor().get_num_planes()
+ linear filters, each get_fhog_window_width() rows by get_fhog_window_height()
+ columns in size. This object contains that set of filters.
+ !*/
+
+ public:
+ long get_num_dimensions(
+ ) const;
+ /*!
+ ensures
+ - Returns the total number of values in the filters.
+ !*/
+
+ const std::vector<matrix<float> >& get_filters(
+ ) const;
+ /*!
+ ensures
+ - returns the set of HOG filters in this object.
+ !*/
+
+ unsigned long num_separable_filters(
+ ) const;
+ /*!
+ ensures
+ - returns the number of separable filters necessary to represent all
+ the filters in get_filters().
+ !*/
+ };
+
+ void detect (
+ const fhog_filterbank& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+ /*!
+ requires
+ - w.get_num_dimensions() == get_num_dimensions()
+ - is_loaded_with_image() == true
+ ensures
+ - Scans the HOG filter defined by w over the HOG pyramid that was populated
+ by the last call to load() and stores all object detections into #dets.
+ - for all valid i:
+ - #dets[i].second == The object box which produced this detection. This rectangle gives
+ the location of the detection. Note that the rectangle will have been converted back into
+ the original image input space. That is, if this detection was made at a low level in the
+ image pyramid then the object box will have been automatically mapped up the pyramid layers
+ to the original image space. Or in other words, if you plot #dets[i].second on top of the
+ image given to load() it will show up in the right place.
+ - #dets[i].first == The score for this detection. This value is equal to dot(w, feature vector
+ for this sliding window location).
+ - #dets[i].first >= thresh
+                - #dets will be sorted in descending order. (i.e. #dets[i].first >= #dets[j].first for all valid i and j where j > i)
+                - Elements of w beyond index get_num_dimensions()-1 are ignored.  I.e. only the first
+                  get_num_dimensions() elements are used.
+ - Note that no form of non-max suppression is performed. If a window has a score >= thresh
+ then it is reported in #dets.
+ !*/
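+
+        // For example, a low-level detection pass might look like this sketch,
+        // where w is a learned weight vector with w.size() >= get_num_dimensions()
+        // (most users call this indirectly through object_detector instead):
+        //
+        //     scanner.load(img);
+        //     std::vector<std::pair<double, rectangle> > dets;
+        //     scanner.detect(scanner.build_fhog_filterbank(w), dets, 0);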
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ - is_loaded_with_image() == true
+ ensures
+ - performs: detect(build_fhog_filterbank(w), dets, thresh)
+ !*/
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+ /*!
+ requires
+ - obj.num_parts() == 0
+ - is_loaded_with_image() == true
+ - psi.size() >= get_num_dimensions()
+ (i.e. psi must have preallocated its memory before this function is called)
+ ensures
+ - This function allows you to determine the feature vector used for an
+ object detection output from detect(). Note that this vector is
+ added to psi. Note also that you can use get_full_object_detection() to
+ convert a rectangle from detect() into the needed full_object_detection.
+ - The dimensionality of the vector added to psi is get_num_dimensions(). This
+ means that elements of psi after psi(get_num_dimensions()-1) are not modified.
+ - Since scan_fhog_pyramid only searches a limited set of object locations,
+ not all possible rectangles can be output by detect(). So in the case
+ where obj.get_rect() could not arise from a call to detect(), this
+ function will map obj.get_rect() to the nearest possible rectangle and
+ then add the feature vector for the mapped rectangle into #psi.
+ - get_best_matching_rect(obj.get_rect()) == the rectangle obj.get_rect()
+ gets mapped to for feature extraction.
+ !*/
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+ /*!
+ ensures
+ - returns full_object_detection(rect)
+ (This function is here only for compatibility with the scan_image_pyramid
+ object)
+ !*/
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+ /*!
+ ensures
+ - Since scan_fhog_pyramid only searches a limited set of object locations,
+ not all possible rectangles can be represented. Therefore, this function
+ allows you to supply a rectangle and obtain the nearest possible
+ candidate object location rectangle.
+ !*/
+
+ double get_nuclear_norm_regularization_strength (
+ ) const;
+ /*!
+ ensures
+ - If the number of separable filters in a fhog_filterbank is small then the
+ filter bank can be scanned over an image much faster than a normal set of
+ filters. Therefore, this object provides the option to encourage
+ machine learning methods that learn a HOG filter bank (i.e.
+ structural_object_detection_trainer) to select filter banks that have
+ this beneficial property. In particular, the value returned by
+ get_nuclear_norm_regularization_strength() is a multiplier on a nuclear
+ norm regularizer which will encourage the selection of filters that use a
+                  small number of separable components.  Larger values tend to give a
+                  smaller number of separable filters.
+ - if (get_nuclear_norm_regularization_strength() == 0) then
+ - This feature is disabled
+ - else
+ - A nuclear norm regularizer will be added when
+ structural_object_detection_trainer is used to learn a HOG filter
+ bank. Note that this can make the training process take
+ significantly longer (but can result in faster object detectors).
+ !*/
+
+ void set_nuclear_norm_regularization_strength (
+ double strength
+ );
+ /*!
+ requires
+ - strength >= 0
+ ensures
+ - #get_nuclear_norm_regularization_strength() == strength
+ !*/
+
+ };
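+
+    // A typical configuration, as a sketch following the pattern of dlib's FHOG
+    // object detection example (the 80x80 window and C value are arbitrary, and
+    // images/boxes are assumed to hold the training data):
+    //
+    //     typedef scan_fhog_pyramid<pyramid_down<6> > image_scanner_type;
+    //     image_scanner_type scanner;
+    //     scanner.set_detection_window_size(80, 80);
+    //     structural_object_detection_trainer<image_scanner_type> trainer(scanner);
+    //     trainer.set_c(1);
+    //     object_detector<image_scanner_type> detector = trainer.train(images, boxes);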
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void serialize (
+ const scan_fhog_pyramid<T>& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void deserialize (
+ scan_fhog_pyramid<T>& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename pyramid_type,
+ typename image_type
+ >
+ void evaluate_detectors (
+        const std::vector<object_detector<scan_fhog_pyramid<pyramid_type> > >& detectors,
+ const image_type& img,
+ std::vector<rect_detection>& dets,
+ const double adjust_threshold = 0
+ );
+ /*!
+ requires
+            - image_type == an implementation of array2d/array2d_kernel_abstract.h
+ - img contains some kind of pixel type.
+ (i.e. pixel_traits<typename image_type::type> is defined)
+ ensures
+ - This function runs each of the provided object_detector objects over img and
+ stores the resulting detections into #dets. Importantly, this function is
+ faster than running each detector individually because it computes the HOG
+ features only once and then reuses them for each detector. However, it is
+ important to note that this speedup is only possible if all the detectors use
+ the same cell_size parameter that determines how HOG features are computed.
+ If different cell_size values are used then this function will not be any
+ faster than running the detectors individually.
+ - This function applies non-max suppression individually to the output of each
+ detector. Therefore, the output is the same as if you ran each detector
+ individually and then concatenated the results.
+ - To be precise, this function performs object detection on the given image and
+ stores the detected objects into #dets. In particular, we will have that:
+ - #dets is sorted such that the highest confidence detections come first.
+ E.g. element 0 is the best detection, element 1 the next best, and so on.
+ - #dets.size() == the number of detected objects.
+ - #dets[i].detection_confidence == The strength of the i-th detection.
+ Larger values indicate that the detector is more confident that #dets[i]
+ is a correct detection rather than being a false alarm. Moreover, the
+ detection_confidence is equal to the detection value output by the
+ scanner minus the threshold value stored at the end of the weight vector.
+ - #dets[i].rect == the bounding box for the i-th detection.
+ - The detection #dets[i].rect was produced by detectors[#dets[i].weight_index].
+ - The detection threshold is adjusted by having adjust_threshold added to it.
+ Therefore, an adjust_threshold value > 0 makes detecting objects harder while
+ a negative value makes it easier. Moreover, the following will be true for
+ all valid i:
+ - #dets[i].detection_confidence >= adjust_threshold
+ This means that, for example, you can obtain the maximum possible number of
+ detections by setting adjust_threshold equal to negative infinity.
+ - This function is threadsafe in the sense that multiple threads can call
+ evaluate_detectors() with the same instances of detectors and img without
+ requiring a mutex lock.
+ !*/
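+
+    // For example, a sketch running several already loaded detectors in one pass:
+    //
+    //     std::vector<object_detector<scan_fhog_pyramid<pyramid_down<6> > > > detectors;
+    //     // ... populate detectors ...
+    //     std::vector<rect_detection> dets;
+    //     evaluate_detectors(detectors, img, dets);
+    //     for (unsigned long i = 0; i < dets.size(); ++i)
+    //         cout << dets[i].weight_index << " " << dets[i].rect << endl;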
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename pyramid_type,
+ typename image_type
+ >
+ std::vector<rectangle> evaluate_detectors (
+        const std::vector<object_detector<scan_fhog_pyramid<pyramid_type> > >& detectors,
+ const image_type& img,
+ const double adjust_threshold = 0
+ );
+ /*!
+ requires
+            - image_type == an implementation of array2d/array2d_kernel_abstract.h
+ - img contains some kind of pixel type.
+ (i.e. pixel_traits<typename image_type::type> is defined)
+ ensures
+ - This function just calls the above evaluate_detectors() routine and copies
+ the output dets into a vector<rectangle> object and returns it. Therefore,
+ this function is provided for convenience.
+ - This function is threadsafe in the sense that multiple threads can call
+ evaluate_detectors() with the same instances of detectors and img without
+ requiring a mutex lock.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_fHOG_PYRAMID_ABSTRACT_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image.h b/ml/dlib/dlib/image_processing/scan_image.h
new file mode 100644
index 000000000..1a9c46eda
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image.h
@@ -0,0 +1,368 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_iMAGE_Hh_
+#define DLIB_SCAN_iMAGE_Hh_
+
+#include <vector>
+#include <utility>
+#include "scan_image_abstract.h"
+#include "../matrix.h"
+#include "../algs.h"
+#include "../rand.h"
+#include "../array2d.h"
+#include "../image_transforms/spatial_filtering.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+
+ inline rectangle bounding_box_of_rects (
+ const std::vector<std::pair<unsigned int, rectangle> >& rects,
+ const point& position
+ )
+ /*!
+ ensures
+ - returns the smallest rectangle that contains all the
+ rectangles in rects. That is, returns the rectangle that
+ contains translate_rect(rects[i].second,position) for all valid i.
+ !*/
+ {
+ rectangle rect;
+
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ rect += translate_rect(rects[i].second,position);
+ }
+
+ return rect;
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ bool all_images_same_size (
+ const image_array_type& images
+ )
+ {
+ if (images.size() == 0)
+ return true;
+
+ for (unsigned long i = 0; i < images.size(); ++i)
+ {
+ if (num_rows(images[0]) != num_rows(images[i]) ||
+ num_columns(images[0]) != num_columns(images[i]))
+ return false;
+ }
+
+ return true;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ double sum_of_rects_in_images (
+ const image_array_type& images,
+ const std::vector<std::pair<unsigned int, rectangle> >& rects,
+ const point& position
+ )
+ {
+ DLIB_ASSERT(all_images_same_size(images),
+ "\t double sum_of_rects_in_images()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t all_images_same_size(images): " << all_images_same_size(images)
+ );
+#ifdef ENABLE_ASSERTS
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ DLIB_ASSERT(rects[i].first < images.size(),
+ "\t double sum_of_rects_in_images()"
+ << "\n\t rects["<<i<<"].first must refer to a valid image."
+ << "\n\t rects["<<i<<"].first: " << rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ }
+#endif
+
+
+ typedef typename image_traits<typename image_array_type::type>::pixel_type pixel_type;
+ typedef typename promote<pixel_type>::type ptype;
+
+ ptype temp = 0;
+
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ const typename image_array_type::type& img = images[rects[i].first];
+ const rectangle rect = get_rect(img).intersect(translate_rect(rects[i].second,position));
+ temp += sum(matrix_cast<ptype>(subm(mat(img), rect)));
+ }
+
+ return static_cast<double>(temp);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ double sum_of_rects_in_images_movable_parts (
+ const image_array_type& images,
+ const rectangle& window,
+ const std::vector<std::pair<unsigned int, rectangle> >& fixed_rects,
+ const std::vector<std::pair<unsigned int, rectangle> >& movable_rects,
+ const point& position
+ )
+ {
+ DLIB_ASSERT(all_images_same_size(images) && center(window) == point(0,0),
+ "\t double sum_of_rects_in_images_movable_parts()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t all_images_same_size(images): " << all_images_same_size(images)
+ << "\n\t center(window): " << center(window)
+ );
+#ifdef ENABLE_ASSERTS
+ for (unsigned long i = 0; i < fixed_rects.size(); ++i)
+ {
+ DLIB_ASSERT(fixed_rects[i].first < images.size(),
+ "\t double sum_of_rects_in_images_movable_parts()"
+ << "\n\t fixed_rects["<<i<<"].first must refer to a valid image."
+ << "\n\t fixed_rects["<<i<<"].first: " << fixed_rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ }
+ for (unsigned long i = 0; i < movable_rects.size(); ++i)
+ {
+ DLIB_ASSERT(movable_rects[i].first < images.size(),
+ "\t double sum_of_rects_in_images_movable_parts()"
+ << "\n\t movable_rects["<<i<<"].first must refer to a valid image."
+ << "\n\t movable_rects["<<i<<"].first: " << movable_rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ DLIB_ASSERT(center(movable_rects[i].second) == point(0,0),
+ "\t double sum_of_rects_in_images_movable_parts()"
+ << "\n\t movable_rects["<<i<<"].second: " << movable_rects[i].second
+ );
+ }
+#endif
+ typedef typename image_traits<typename image_array_type::type>::pixel_type pixel_type;
+ typedef typename promote<pixel_type>::type ptype;
+
+ ptype temp = 0;
+
+ // compute TOTAL_FIXED part
+ for (unsigned long i = 0; i < fixed_rects.size(); ++i)
+ {
+ const typename image_array_type::type& img = images[fixed_rects[i].first];
+ const rectangle rect = get_rect(img).intersect(translate_rect(fixed_rects[i].second,position));
+ temp += sum(matrix_cast<ptype>(subm(mat(img), rect)));
+ }
+
+ if (images.size() > 0)
+ {
+ // compute TOTAL_MOVABLE part
+ array2d<ptype> tempimg(images[0].nr(), images[0].nc());
+ for (unsigned long i = 0; i < movable_rects.size(); ++i)
+ {
+ const typename image_array_type::type& img = images[movable_rects[i].first];
+
+ sum_filter_assign(img, tempimg, movable_rects[i].second);
+
+ const rectangle rect = get_rect(tempimg).intersect(translate_rect(window,position));
+ if (rect.is_empty() == false)
+ temp += std::max(0,max(matrix_cast<ptype>(subm(mat(tempimg), rect))));
+ }
+ }
+
+ return static_cast<double>(temp);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_type
+ >
+ void find_points_above_thresh (
+ std::vector<std::pair<double, point> >& dets,
+ const image_type& img_,
+ const double thresh,
+ const unsigned long max_dets
+ )
+ {
+ const_image_view<image_type> img(img_);
+ typedef typename image_traits<image_type>::pixel_type ptype;
+
+ dets.clear();
+ if (max_dets == 0)
+ return;
+
+ unsigned long count = 0;
+ dlib::rand rnd;
+ for (long r = 0; r < img.nr(); ++r)
+ {
+ for (long c = 0; c < img.nc(); ++c)
+ {
+ const ptype val = img[r][c];
+ if (val >= thresh)
+ {
+ ++count;
+
+ if (dets.size() < max_dets)
+ {
+ dets.push_back(std::make_pair(val, point(c,r)));
+ }
+ else
+ {
+                        // The idea here is to randomly sample possible detection locations
+                        // throughout the image rather than just stopping the scan once we hit
+                        // the max_dets limit.  This is reservoir sampling, so at the end of
+                        // scan_image() dets will hold a uniform random subsample of all the
+                        // detections with values >= thresh.
+ const unsigned long random_index = rnd.get_random_32bit_number()%count;
+ if (random_index < dets.size())
+ {
+ dets[random_index] = std::make_pair(val, point(c,r));
+ }
+ }
+ }
+ }
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ void scan_image (
+ std::vector<std::pair<double, point> >& dets,
+ const image_array_type& images,
+ const std::vector<std::pair<unsigned int, rectangle> >& rects,
+ const double thresh,
+ const unsigned long max_dets
+ )
+ {
+ DLIB_ASSERT(images.size() > 0 && rects.size() > 0 && all_images_same_size(images),
+ "\t void scan_image()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t images.size(): " << images.size()
+ << "\n\t rects.size(): " << rects.size()
+ << "\n\t all_images_same_size(images): " << all_images_same_size(images)
+ );
+#ifdef ENABLE_ASSERTS
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ DLIB_ASSERT(rects[i].first < images.size(),
+ "\t void scan_image()"
+ << "\n\t rects["<<i<<"].first must refer to a valid image."
+ << "\n\t rects["<<i<<"].first: " << rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ }
+#endif
+
+
+
+
+ typedef typename image_traits<typename image_array_type::type>::pixel_type pixel_type;
+ typedef typename promote<pixel_type>::type ptype;
+
+ array2d<ptype> accum(images[0].nr(), images[0].nc());
+ assign_all_pixels(accum, 0);
+
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ sum_filter(images[rects[i].first], accum, rects[i].second);
+
+ find_points_above_thresh(dets, accum, thresh, max_dets);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ void scan_image_movable_parts (
+ std::vector<std::pair<double, point> >& dets,
+ const image_array_type& images,
+ const rectangle& window,
+ const std::vector<std::pair<unsigned int, rectangle> >& fixed_rects,
+ const std::vector<std::pair<unsigned int, rectangle> >& movable_rects,
+ const double thresh,
+ const unsigned long max_dets
+ )
+ {
+ DLIB_ASSERT(images.size() > 0 && all_images_same_size(images) &&
+ center(window) == point(0,0) && window.area() > 0,
+ "\t void scan_image_movable_parts()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t all_images_same_size(images): " << all_images_same_size(images)
+ << "\n\t center(window): " << center(window)
+ << "\n\t window.area(): " << window.area()
+ << "\n\t images.size(): " << images.size()
+ );
+#ifdef ENABLE_ASSERTS
+ for (unsigned long i = 0; i < fixed_rects.size(); ++i)
+ {
+ DLIB_ASSERT(fixed_rects[i].first < images.size(),
+ "\t void scan_image_movable_parts()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t fixed_rects["<<i<<"].first must refer to a valid image."
+ << "\n\t fixed_rects["<<i<<"].first: " << fixed_rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ }
+ for (unsigned long i = 0; i < movable_rects.size(); ++i)
+ {
+ DLIB_ASSERT(movable_rects[i].first < images.size(),
+ "\t void scan_image_movable_parts()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t movable_rects["<<i<<"].first must refer to a valid image."
+ << "\n\t movable_rects["<<i<<"].first: " << movable_rects[i].first
+ << "\n\t images.size(): " << images.size()
+ );
+ DLIB_ASSERT(center(movable_rects[i].second) == point(0,0) &&
+ movable_rects[i].second.area() > 0,
+ "\t void scan_image_movable_parts()"
+ << "\n\t Invalid arguments given to this function."
+ << "\n\t movable_rects["<<i<<"].second: " << movable_rects[i].second
+ << "\n\t movable_rects["<<i<<"].second.area(): " << movable_rects[i].second.area()
+ );
+ }
+#endif
+
+ if (movable_rects.size() == 0 && fixed_rects.size() == 0)
+ return;
+
+ typedef typename image_traits<typename image_array_type::type>::pixel_type pixel_type;
+ typedef typename promote<pixel_type>::type ptype;
+
+ array2d<ptype> accum(images[0].nr(), images[0].nc());
+ assign_all_pixels(accum, 0);
+
+ for (unsigned long i = 0; i < fixed_rects.size(); ++i)
+ sum_filter(images[fixed_rects[i].first], accum, fixed_rects[i].second);
+
+ array2d<ptype> temp(accum.nr(), accum.nc());
+ for (unsigned long i = 0; i < movable_rects.size(); ++i)
+ {
+ const rectangle rect = movable_rects[i].second;
+ sum_filter_assign(images[movable_rects[i].first], temp, rect);
+ max_filter(temp, accum, window.width(), window.height(), 0);
+ }
+
+ find_points_above_thresh(dets, accum, thresh, max_dets);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_iMAGE_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_abstract.h b/ml/dlib/dlib/image_processing/scan_image_abstract.h
new file mode 100644
index 000000000..fe2fc51ac
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_abstract.h
@@ -0,0 +1,227 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_iMAGE_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_iMAGE_ABSTRACT_Hh_
+
+#include <vector>
+#include <utility>
+#include "../algs.h"
+#include "../image_processing/generic_image.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ bool all_images_same_size (
+ const image_array_type& images
+ );
+ /*!
+ requires
+ - image_array_type == an implementation of array/array_kernel_abstract.h
+ - image_array_type::type == an image object that implements the interface
+ defined in dlib/image_processing/generic_image.h
+ ensures
+ - if (all elements of images have the same dimensions (i.e.
+ for all i and j: get_rect(images[i]) == get_rect(images[j]))) then
+ - returns true
+ - else
+ - returns false
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ double sum_of_rects_in_images (
+ const image_array_type& images,
+ const std::vector<std::pair<unsigned int, rectangle> >& rects,
+ const point& position
+ );
+ /*!
+ requires
+ - image_array_type == an implementation of array/array_kernel_abstract.h
+ - image_array_type::type == an image object that implements the interface
+ defined in dlib/image_processing/generic_image.h. Moreover, these objects must
+ contain a scalar pixel type (e.g. int rather than rgb_pixel)
+ - all_images_same_size(images) == true
+ - for all valid i: rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ ensures
+ - returns the sum of the pixels inside the given rectangles. To be precise,
+ let RECT_SUM[i] = sum of pixels inside the rectangle translate_rect(rects[i].second, position)
+ from the image images[rects[i].first]. Then this function returns the
+ sum of RECT_SUM[i] for all the valid values of i.
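+
+            For illustration, a minimal sketch of a call (the image contents and
+            rectangle below are made up):
+                array<array2d<int> > images;
+                images.resize(1);
+                images[0].set_size(100,100);
+                assign_all_pixels(images[0], 1);
+                std::vector<std::pair<unsigned int, rectangle> > rects;
+                rects.push_back(std::make_pair(0, centered_rect(point(0,0), 10, 10)));
+                // every pixel is 1, so the sum over one 10x10 rectangle is 100
+                double s = sum_of_rects_in_images(images, rects, point(50,50));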
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ double sum_of_rects_in_images_movable_parts (
+ const image_array_type& images,
+ const rectangle& window,
+ const std::vector<std::pair<unsigned int, rectangle> >& fixed_rects,
+ const std::vector<std::pair<unsigned int, rectangle> >& movable_rects,
+ const point& position
+ );
+ /*!
+ requires
+ - image_array_type == an implementation of array/array_kernel_abstract.h
+ - image_array_type::type == an image object that implements the interface
+ defined in dlib/image_processing/generic_image.h. Moreover, these objects must
+ contain a scalar pixel type (e.g. int rather than rgb_pixel)
+ - all_images_same_size(images) == true
+ - center(window) == point(0,0)
+ - for all valid i:
+ - fixed_rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ - for all valid i:
+ - movable_rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ - center(movable_rects[i].second) == point(0,0)
+ ensures
+ - returns the sum of the pixels inside fixed_rects as well as the sum of the pixels
+ inside movable_rects when these latter rectangles are placed at their highest
+ scoring locations inside the given window. To be precise:
+ - let RECT_SUM(r,x) = sum of pixels inside the rectangle translate_rect(r.second, x)
+ from the image images[r.first].
+ - let WIN_MAX(i) = The maximum value of RECT_SUM(movable_rects[i],X) when maximizing
+ over all the X such that translate_rect(window,position).contains(X) == true.
+
+ - let TOTAL_FIXED == sum over all elements R in fixed_rects of: RECT_SUM(R,position)
+ - let TOTAL_MOVABLE == sum over all valid i of: max(WIN_MAX(i), 0)
+
+ Then this function returns TOTAL_FIXED + TOTAL_MOVABLE.
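+
+            Note that, because of the max(WIN_MAX(i), 0) term, a movable rect can only
+            ever increase the returned value; if its best placement inside the window
+            scores below zero it simply contributes nothing.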
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_type
+ >
+ void find_points_above_thresh (
+ std::vector<std::pair<double, point> >& dets,
+ const image_type& img,
+ const double thresh,
+ const unsigned long max_dets
+ );
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+              dlib/image_processing/generic_image.h. Moreover, it must contain a
+ scalar pixel type (e.g. int rather than rgb_pixel)
+ ensures
+ - #dets == a list of points from img which had pixel values >= thresh.
+ - Specifically, we have:
+ - #dets.size() <= max_dets
+ (note that dets is cleared before new detections are added by find_points_above_thresh())
+ - for all valid i:
+ - #dets[i].first == img[#dets[i].second.y()][#dets[i].second.x()]
+ (i.e. the first field contains the value of the pixel at this detection location)
+ - #dets[i].first >= thresh
+ - if (there are more than max_dets locations that pass the above threshold test) then
+ - #dets == a random subsample of all the locations which passed the threshold
+ test.
+ - else
+ - #dets == all the points which passed the threshold test.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ void scan_image (
+ std::vector<std::pair<double, point> >& dets,
+ const image_array_type& images,
+ const std::vector<std::pair<unsigned int, rectangle> >& rects,
+ const double thresh,
+ const unsigned long max_dets
+ );
+ /*!
+ requires
+ - image_array_type == an implementation of array/array_kernel_abstract.h
+ - image_array_type::type == an image object that implements the interface
+ defined in dlib/image_processing/generic_image.h. Moreover, these objects must
+ contain a scalar pixel type (e.g. int rather than rgb_pixel)
+ - images.size() > 0
+ - rects.size() > 0
+ - all_images_same_size(images) == true
+ - for all valid i: rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ ensures
+ - slides the set of rectangles over the image space and reports the locations
+ which give a sum bigger than thresh.
+ - Specifically, we have:
+ - #dets.size() <= max_dets
+ (note that dets is cleared before new detections are added by scan_image())
+ - for all valid i:
+ - #dets[i].first == sum_of_rects_in_images(images,rects,#dets[i].second) >= thresh
+ - if (there are more than max_dets locations that pass the threshold test) then
+ - #dets == a random subsample of all the locations which passed the threshold
+ test.
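+
+            A minimal usage sketch (the rectangle size, threshold, and max_dets below
+            are illustrative only):
+                array<array2d<int> > images;  // one or more same-sized feature channels
+                // ... fill images ...
+                std::vector<std::pair<unsigned int, rectangle> > rects;
+                rects.push_back(std::make_pair(0, centered_rect(point(0,0), 8, 8)));
+                std::vector<std::pair<double, point> > dets;
+                scan_image(dets, images, rects, 100.0, 1000);
+                // dets now holds up to 1000 points whose rectangle sums are >= 100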
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array_type
+ >
+ void scan_image_movable_parts (
+ std::vector<std::pair<double, point> >& dets,
+ const image_array_type& images,
+ const rectangle& window,
+ const std::vector<std::pair<unsigned int, rectangle> >& fixed_rects,
+ const std::vector<std::pair<unsigned int, rectangle> >& movable_rects,
+ const double thresh,
+ const unsigned long max_dets
+ );
+ /*!
+ requires
+ - image_array_type == an implementation of array/array_kernel_abstract.h
+ - image_array_type::type == an image object that implements the interface
+ defined in dlib/image_processing/generic_image.h. Moreover, these objects must
+ contain a scalar pixel type (e.g. int rather than rgb_pixel)
+ - images.size() > 0
+ - all_images_same_size(images) == true
+ - center(window) == point(0,0)
+ - window.area() > 0
+ - for all valid i:
+ - fixed_rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ - for all valid i:
+ - movable_rects[i].first < images.size()
+ (i.e. all the rectangles must reference valid elements of images)
+ - center(movable_rects[i].second) == point(0,0)
+ - movable_rects[i].second.area() > 0
+ ensures
+ - Scans the given window over the images and reports the locations with a score bigger
+ than thresh.
+ - Specifically, we have:
+ - #dets.size() <= max_dets
+ (note that dets is cleared before new detections are added by scan_image_movable_parts())
+ - for all valid i:
+ - #dets[i].first == sum_of_rects_in_images_movable_parts(images,
+ window,
+ fixed_rects,
+ movable_rects,
+ #dets[i].second) >= thresh
+ - if (there are more than max_dets locations that pass the above threshold test) then
+ - #dets == a random subsample of all the locations which passed the threshold
+ test.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_iMAGE_ABSTRACT_Hh_
+
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_boxes.h b/ml/dlib/dlib/image_processing/scan_image_boxes.h
new file mode 100644
index 000000000..f4549565c
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_boxes.h
@@ -0,0 +1,630 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_IMAGE_bOXES_Hh_
+#define DLIB_SCAN_IMAGE_bOXES_Hh_
+
+#include "scan_image_boxes_abstract.h"
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../array2d.h"
+#include <vector>
+#include "../image_processing/full_object_detection.h"
+#include "../image_transforms.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class default_box_generator
+ {
+ public:
+ template <typename image_type>
+ void operator() (
+ const image_type& img,
+ std::vector<rectangle>& rects
+ ) const
+ {
+ rects.clear();
+ find_candidate_object_locations(img, rects);
+ }
+ };
+
+ inline void serialize(const default_box_generator&, std::ostream& ) {}
+ inline void deserialize(default_box_generator&, std::istream& ) {}
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator = default_box_generator
+ >
+ class scan_image_boxes : noncopyable
+ {
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+
+ typedef Feature_extractor_type feature_extractor_type;
+ typedef Box_generator box_generator;
+
+ scan_image_boxes (
+ );
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+
+ inline bool is_loaded_with_image (
+ ) const;
+
+ inline void copy_configuration(
+ const feature_extractor_type& fe
+ );
+
+ inline void copy_configuration(
+ const box_generator& bg
+ );
+
+ const box_generator& get_box_generator (
+ ) const { return detect_boxes; }
+
+ const Feature_extractor_type& get_feature_extractor (
+ ) const { return feats; }
+
+ inline void copy_configuration (
+ const scan_image_boxes& item
+ );
+
+ inline long get_num_dimensions (
+ ) const;
+
+ unsigned long get_num_spatial_pyramid_levels (
+ ) const;
+
+ void set_num_spatial_pyramid_levels (
+ unsigned long levels
+ );
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+ /*!
+ requires
+ - is_loaded_with_image() == true
+ !*/
+
+ inline unsigned long get_num_detection_templates (
+ ) const { return 1; }
+
+ inline unsigned long get_num_movable_components_per_detection_template (
+ ) const { return 0; }
+
+ template <typename T, typename U>
+ friend void serialize (
+ const scan_image_boxes<T,U>& item,
+ std::ostream& out
+ );
+
+ template <typename T, typename U>
+ friend void deserialize (
+ scan_image_boxes<T,U>& item,
+ std::istream& in
+ );
+
+ private:
+ static bool compare_pair_rect (
+ const std::pair<double, rectangle>& a,
+ const std::pair<double, rectangle>& b
+ )
+ {
+ return a.first < b.first;
+ }
+
+ void test_coordinate_transforms()
+ {
+ for (long x = -10; x <= 10; x += 10)
+ {
+ for (long y = -10; y <= 10; y += 10)
+ {
+ const rectangle rect = centered_rect(x,y,5,6);
+ rectangle a;
+
+ a = feats.image_to_feat_space(rect);
+ if (a.width() > 10000000 || a.height() > 10000000 )
+ {
+ DLIB_CASSERT(false, "The image_to_feat_space() routine is outputting rectangles of an implausibly "
+ << "\nlarge size. This means there is probably a bug in your feature extractor.");
+ }
+ a = feats.feat_to_image_space(rect);
+ if (a.width() > 10000000 || a.height() > 10000000 )
+ {
+ DLIB_CASSERT(false, "The feat_to_image_space() routine is outputting rectangles of an implausibly "
+ << "\nlarge size. This means there is probably a bug in your feature extractor.");
+ }
+ }
+ }
+
+ }
+
+ static void add_grid_rects (
+ std::vector<rectangle>& rects,
+ const rectangle& object_box,
+ unsigned int cells_x,
+ unsigned int cells_y
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(cells_x > 0 && cells_y > 0,
+ "\t void add_grid_rects()"
+ << "\n\t The number of cells along a dimension can't be zero. "
+ << "\n\t cells_x: " << cells_x
+ << "\n\t cells_y: " << cells_y
+ );
+
+ const matrix_range_exp<double>& x = linspace(object_box.left(), object_box.right(), cells_x+1);
+ const matrix_range_exp<double>& y = linspace(object_box.top(), object_box.bottom(), cells_y+1);
+
+ for (long j = 0; j+1 < y.size(); ++j)
+ {
+ for (long i = 0; i+1 < x.size(); ++i)
+ {
+ const dlib::vector<double,2> tl(x(i),y(j));
+ const dlib::vector<double,2> br(x(i+1),y(j+1));
+ rects.push_back(rectangle(tl,br));
+ }
+ }
+ }
+
+ void get_feature_extraction_regions (
+ const rectangle& rect,
+ std::vector<rectangle>& regions
+ ) const
+ /*!
+ ensures
+ - #regions.size() is always the same number no matter what the input is. The
+ regions also have a consistent ordering.
+ - all the output rectangles are contained within rect.
+ !*/
+ {
+ regions.clear();
+
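+            // Level l contributes a 2^(l-1) by 2^(l-1) grid of cells, so successive
+            // levels add 1, 4, 16, ... rectangles.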
+ for (unsigned int l = 1; l <= num_spatial_pyramid_levels; ++l)
+ {
+ const int cells = (int)std::pow(2.0, l-1.0);
+ add_grid_rects(regions, rect, cells, cells);
+ }
+ }
+
+ unsigned int get_num_components_per_detection_template(
+ ) const
+ {
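+            // 1 + 4 + 16 + ... + 4^(num_spatial_pyramid_levels-1) == (4^levels - 1)/3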
+ return (unsigned int)(std::pow(4.0,(double)num_spatial_pyramid_levels)-1)/3;
+ }
+
+ feature_extractor_type feats;
+ std::vector<rectangle> search_rects;
+ bool loaded_with_image;
+ unsigned int num_spatial_pyramid_levels;
+ box_generator detect_boxes;
+
+ const long box_sizedims;
+ const long box_maxsize;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void serialize (
+ const scan_image_boxes<T,U>& item,
+ std::ostream& out
+ )
+ {
+ int version = 1;
+ serialize(version, out);
+ serialize(item.feats, out);
+ serialize(item.search_rects, out);
+ serialize(item.loaded_with_image, out);
+ serialize(item.num_spatial_pyramid_levels, out);
+ serialize(item.detect_boxes, out);
+ serialize(item.get_num_dimensions(), out);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void deserialize (
+ scan_image_boxes<T,U>& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unsupported version found when deserializing a scan_image_boxes object.");
+
+ deserialize(item.feats, in);
+ deserialize(item.search_rects, in);
+ deserialize(item.loaded_with_image, in);
+ deserialize(item.num_spatial_pyramid_levels, in);
+ deserialize(item.detect_boxes, in);
+
+ // When developing some feature extractor, it's easy to accidentally change its
+ // number of dimensions and then try to deserialize data from an older version of
+ // your extractor into the current code. This check is here to catch that kind of
+ // user error.
+ long dims;
+ deserialize(dims, in);
+ if (item.get_num_dimensions() != dims)
+ throw serialization_error("Number of dimensions in serialized scan_image_boxes doesn't match the expected number.");
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// scan_image_boxes member functions
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ scan_image_boxes<Feature_extractor_type,Box_generator>::
+ scan_image_boxes (
+ ) :
+ loaded_with_image(false),
+ num_spatial_pyramid_levels(3),
+ box_sizedims(20),
+ box_maxsize(1200)
+ {
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ template <
+ typename image_type
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ load (
+ const image_type& img
+ )
+ {
+ feats.load(img);
+ detect_boxes(img, search_rects);
+ loaded_with_image = true;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ bool scan_image_boxes<Feature_extractor_type,Box_generator>::
+ is_loaded_with_image (
+ ) const
+ {
+ return loaded_with_image;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ copy_configuration(
+ const feature_extractor_type& fe
+ )
+ {
+ test_coordinate_transforms();
+ feats.copy_configuration(fe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ copy_configuration(
+ const box_generator& bg
+ )
+ {
+ detect_boxes = bg;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ copy_configuration (
+ const scan_image_boxes& item
+ )
+ {
+ feats.copy_configuration(item.feats);
+ detect_boxes = item.detect_boxes;
+ num_spatial_pyramid_levels = item.num_spatial_pyramid_levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ unsigned long scan_image_boxes<Feature_extractor_type,Box_generator>::
+ get_num_spatial_pyramid_levels (
+ ) const
+ {
+ return num_spatial_pyramid_levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ set_num_spatial_pyramid_levels (
+ unsigned long levels
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(levels > 0,
+ "\t void scan_image_boxes::set_num_spatial_pyramid_levels()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t levels: " << levels
+ << "\n\t this: " << this
+ );
+
+
+ num_spatial_pyramid_levels = levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ long scan_image_boxes<Feature_extractor_type,Box_generator>::
+ get_num_dimensions (
+ ) const
+ {
+ return feats.get_num_dimensions()*get_num_components_per_detection_template() + box_sizedims*2;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ w.size() >= get_num_dimensions(),
+ "\t void scan_image_boxes::detect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t w.size(): " << w.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ dets.clear();
+
+ array<integral_image_generic<double> > saliency_images(get_num_components_per_detection_template());
+
+ array2d<double> temp_img(feats.nr(), feats.nc());
+
+ // build saliency images
+ for (unsigned long i = 0; i < saliency_images.size(); ++i)
+ {
+ const unsigned long offset = 2*box_sizedims + feats.get_num_dimensions()*i;
+
+ // make the basic saliency image for the i-th feature extraction region
+ for (long r = 0; r < feats.nr(); ++r)
+ {
+ for (long c = 0; c < feats.nc(); ++c)
+ {
+ const typename feature_extractor_type::descriptor_type& descriptor = feats(r,c);
+
+ double sum = 0;
+ for (unsigned long k = 0; k < descriptor.size(); ++k)
+ {
+ sum += w(descriptor[k].first + offset)*descriptor[k].second;
+ }
+ temp_img[r][c] = sum;
+ }
+ }
+
+ // now convert base saliency image into final integral image
+ saliency_images[i].load(temp_img);
+ }
+
+
+ // now search the saliency images
+ std::vector<rectangle> regions;
+ const rectangle bounds = get_rect(feats);
+ for (unsigned long i = 0; i < search_rects.size(); ++i)
+ {
+ const rectangle rect = feats.image_to_feat_space(search_rects[i]).intersect(bounds);
+ if (rect.is_empty())
+ continue;
+ get_feature_extraction_regions(rect, regions);
+ double score = 0;
+ for (unsigned long k = 0; k < regions.size(); ++k)
+ {
+ score += saliency_images[k].get_sum_of_area(regions[k]);
+ }
+ const double width = search_rects[i].width();
+ const double height = search_rects[i].height();
+
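+            // Besides the appearance features, the score includes two piecewise-linear
+            // functions of the candidate box's width and height; the first 2*box_sizedims
+            // elements of w parameterize these size terms.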
+ score += dot(linpiece(width, linspace(0, box_maxsize, box_sizedims+1)), rowm(w, range(0,box_sizedims-1)));
+ score += dot(linpiece(height, linspace(0, box_maxsize, box_sizedims+1)), rowm(w, range(box_sizedims,2*box_sizedims-1)));
+
+ if (score >= thresh)
+ {
+ dets.push_back(std::make_pair(score, search_rects[i]));
+ }
+ }
+
+ std::sort(dets.rbegin(), dets.rend(), compare_pair_rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ const rectangle scan_image_boxes<Feature_extractor_type,Box_generator>::
+ get_best_matching_rect (
+ const rectangle& rect
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image(),
+ "\t const rectangle scan_image_boxes::get_best_matching_rect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t this: " << this
+ );
+
+
+ double best_score = -1;
+ rectangle best_rect;
+ for (unsigned long i = 0; i < search_rects.size(); ++i)
+ {
+ const double score = (rect.intersect(search_rects[i])).area()/(double)(rect+search_rects[i]).area();
+ if (score > best_score)
+ {
+ best_score = score;
+ best_rect = search_rects[i];
+ }
+ }
+ return best_rect;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ full_object_detection scan_image_boxes<Feature_extractor_type,Box_generator>::
+ get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& /*w*/
+ ) const
+ {
+ return full_object_detection(rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void scan_image_boxes<Feature_extractor_type,Box_generator>::
+ get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ psi.size() >= get_num_dimensions() &&
+ obj.num_parts() == 0,
+ "\t void scan_image_boxes::get_feature_vector()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t psi.size(): " << psi.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t obj.num_parts(): " << obj.num_parts()
+ << "\n\t this: " << this
+ );
+
+
+
+ const rectangle best_rect = get_best_matching_rect(obj.get_rect());
+ const rectangle mapped_rect = feats.image_to_feat_space(best_rect).intersect(get_rect(feats));
+ if (mapped_rect.is_empty())
+ return;
+
+ std::vector<rectangle> regions;
+ get_feature_extraction_regions(mapped_rect, regions);
+
+ // pull features out of all the boxes in regions.
+ for (unsigned long j = 0; j < regions.size(); ++j)
+ {
+ const rectangle rect = regions[j];
+
+ const unsigned long template_region_id = j;
+ const unsigned long offset = box_sizedims*2 + feats.get_num_dimensions()*template_region_id;
+ for (long r = rect.top(); r <= rect.bottom(); ++r)
+ {
+ for (long c = rect.left(); c <= rect.right(); ++c)
+ {
+ const typename feature_extractor_type::descriptor_type& descriptor = feats(r,c);
+ for (unsigned long k = 0; k < descriptor.size(); ++k)
+ {
+ psi(descriptor[k].first + offset) += descriptor[k].second;
+ }
+ }
+ }
+ }
+
+ const double width = best_rect.width();
+ const double height = best_rect.height();
+ set_rowm(psi, range(0,box_sizedims-1)) += linpiece(width, linspace(0, box_maxsize, box_sizedims+1));
+ set_rowm(psi, range(box_sizedims,box_sizedims*2-1)) += linpiece(height, linspace(0, box_maxsize, box_sizedims+1));
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMAGE_bOXES_Hh_
+
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_boxes_abstract.h b/ml/dlib/dlib/image_processing/scan_image_boxes_abstract.h
new file mode 100644
index 000000000..e2f16aa76
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_boxes_abstract.h
@@ -0,0 +1,394 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_IMAGE_bOXES_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_IMAGE_bOXES_ABSTRACT_Hh_
+
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../image_processing.h"
+#include "../array2d.h"
+#include "full_object_detection_abstract.h"
+#include "../image_transforms/segment_image_abstract.h"
+#include <vector>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class default_box_generator
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This is a function object that takes in an image and outputs a set of
+ candidate object locations. It is also the default box generator used by
+ the scan_image_boxes object defined below.
+ !*/
+
+ public:
+
+ template <typename image_type>
+ void operator() (
+ const image_type& img,
+ std::vector<rectangle>& rects
+ ) const
+ /*!
+ ensures
+ - #rects == the set of candidate object locations which should be searched
+ inside img. That is, these are the rectangles which might contain
+ objects of interest within the given image.
+ !*/
+ {
+ rects.clear();
+ find_candidate_object_locations(img, rects);
+ }
+ };
+
+    inline void serialize (const default_box_generator&, std::ostream&) {}
+    inline void deserialize (default_box_generator&, std::istream&) {}
+ /*!
+ ensures
+ - provides serialization support.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator = default_box_generator
+ >
+ class scan_image_boxes : noncopyable
+ {
+ /*!
+ REQUIREMENTS ON Feature_extractor_type
+ - must be an object with an interface compatible with the hashed_feature_image
+ object defined in dlib/image_keypoint/hashed_feature_image_abstract.h or
+ with the nearest_neighbor_feature_image object defined in
+ dlib/image_keypoint/nearest_neighbor_feature_image_abstract.h
+
+ REQUIREMENTS ON Box_generator
+ - must be an object with an interface compatible with the
+ default_box_generator object defined at the top of this file.
+
+ INITIAL VALUE
+ - get_num_spatial_pyramid_levels() == 3
+ - is_loaded_with_image() == false
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for running a classifier over an image with the goal
+ of localizing each object present. The localization is in the form of the
+ bounding box around each object of interest.
+
+ Unlike the scan_image_pyramid object which scans a fixed sized window over
+ an image pyramid, the scan_image_boxes tool allows you to define your own
+ list of "candidate object locations" which should be evaluated. This is
+ simply a list of rectangle objects which might contain objects of interest.
+ The scan_image_boxes object will then evaluate the classifier at each of
+ these locations and return the subset of rectangles which appear to have
+ objects in them. The candidate object location generation is provided by
+ the Box_generator that is passed in as a template argument.
+
+ This object can also be understood as a general tool for implementing the
+ spatial pyramid models described in the paper:
+ Beyond Bags of Features: Spatial Pyramid Matching for Recognizing
+ Natural Scene Categories by Svetlana Lazebnik, Cordelia Schmid,
+ and Jean Ponce
+
+
+ The classifiers used by this object have three parts:
+ 1. The underlying feature extraction provided by Feature_extractor_type
+ objects, which associate a vector with each location in an image.
+
+ 2. A rule for extracting a feature vector from a candidate object
+ location. In this object we use the spatial pyramid matching method.
+ This means we cut an object's detection window into a set of "feature
+ extraction regions" and extract a bag-of-words vector from each
+ before finally concatenating them to form the final feature vector
+ representing the entire object window. The set of feature extraction
+ regions can be configured by the user by calling
+ set_num_spatial_pyramid_levels(). To be a little more precise, the
+ feature vector for a candidate object window is defined as follows:
+ - Let N denote the number of feature extraction zones.
+ - Let M denote the dimensionality of the vectors output by
+ Feature_extractor_type objects.
+ - Let F(i) == the M dimensional vector which is the sum of all
+ vectors given by our Feature_extractor_type object inside the
+ i-th feature extraction zone. So this is notionally a
+ bag-of-words vector from the i-th zone.
+ - Then the feature vector for an object window is an M*N
+ dimensional vector [F(1) F(2) F(3) ... F(N)] (i.e. it is a
+ concatenation of the N vectors). This feature vector can be
+ thought of as a collection of N bags-of-words, each bag coming
+ from a spatial location determined by one of the feature
+ extraction zones.
+
+ 3. A weight vector and a threshold value. The dot product between the
+ weight vector and the feature vector for a candidate object location
+ gives the score of the location. If this score is greater than the
+ threshold value then the candidate object location is output as a
+ detection.
+
+ THREAD SAFETY
+ Concurrent access to an instance of this object is not safe and should be
+ protected by a mutex lock except for the case where you are copying the
+ configuration (via copy_configuration()) of a scan_image_boxes object to
+ many other threads. In this case, it is safe to copy the configuration of
+ a shared object so long as no other operations are performed on it.
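+
+            A minimal usage sketch, where my_feature_extractor stands for any type
+            meeting the Feature_extractor_type requirements above (the zero weight
+            vector and threshold are illustrative only; in practice w is learned):
+                scan_image_boxes<my_feature_extractor> scanner;
+                scanner.load(img);
+                matrix<double,0,1> w(scanner.get_num_dimensions());
+                w = 0;
+                std::vector<std::pair<double, rectangle> > dets;
+                scanner.detect(w, dets, 0);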
+ !*/
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+
+ typedef Feature_extractor_type feature_extractor_type;
+ typedef Box_generator box_generator;
+
+ scan_image_boxes (
+ );
+ /*!
+ ensures
+ - this object is properly initialized
+ !*/
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+ /*!
+ requires
+ - image_type must be a type with the following properties:
+ - image_type objects can be loaded into Feature_extractor_type
+ objects via Feature_extractor_type::load().
+ - image_type objects can be passed to the first argument of
+ Box_generator::operator()
+ ensures
+ - #is_loaded_with_image() == true
+ - This object is ready to run a classifier over img to detect object
+ locations. Call detect() to do this.
+ !*/
+
+ bool is_loaded_with_image (
+ ) const;
+ /*!
+ ensures
+ - returns true if this object has been loaded with an image to process and
+ false otherwise.
+ !*/
+
+ const feature_extractor_type& get_feature_extractor (
+ ) const;
+ /*!
+ ensures
+ - returns a const reference to the feature_extractor_type object used
+ internally for local feature extraction.
+ !*/
+
+ void copy_configuration(
+ const feature_extractor_type& fe
+ );
+ /*!
+ ensures
+ - This function performs the equivalent of
+ get_feature_extractor().copy_configuration(fe) (i.e. this function allows
+ you to configure the parameters of the underlying feature extractor used
+ by a scan_image_boxes object)
+ !*/
+
+ void copy_configuration(
+ const box_generator& bg
+ );
+ /*!
+ ensures
+ - #get_box_generator() == bg
+ (i.e. this function allows you to configure the parameters of the
+ underlying box generator used by a scan_image_boxes object)
+ !*/
+
+ const box_generator& get_box_generator (
+ ) const;
+ /*!
+ ensures
+ - returns the box_generator used by this object to generate candidate
+ object locations.
+ !*/
+
+ void copy_configuration (
+ const scan_image_boxes& item
+ );
+ /*!
+ ensures
+ - Copies all the state information of item into *this, except for state
+ information populated by load(). More precisely, given two scan_image_boxes
+ objects S1 and S2, the following sequence of instructions should always
+ result in both of them having the exact same state:
+ S2.copy_configuration(S1);
+ S1.load(img);
+ S2.load(img);
+ !*/
+
+ long get_num_dimensions (
+ ) const;
+ /*!
+ ensures
+ - returns the number of dimensions in the feature vector for a candidate
+ object location. This value is the dimensionality of the underlying
+ feature vectors produced by Feature_extractor_type times the number of
+ feature extraction regions used. Note that the number of feature
+ extraction regions used is a function of
+ get_num_spatial_pyramid_levels().
+ !*/
+
+ unsigned long get_num_spatial_pyramid_levels (
+ ) const;
+ /*!
+ ensures
+ - returns the number of layers in the spatial pyramid. For example, if
+ this function returns 1 then it means we use a simple bag-of-words
+ representation over the whole object window. If it returns 2 then it
+ means the feature representation is the concatenation of 5 bag-of-words
+ vectors, one from the entire object window and 4 others from 4 different
+ parts of the object window. If it returns 3 then there are 1+4+16
+ bag-of-words vectors concatenated together in the feature representation,
+ and so on.
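+              In general, L levels yield (4^L - 1)/3 feature extraction zones, since
+              level l contributes a 2^(l-1) by 2^(l-1) grid.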
+ !*/
+
+ void set_num_spatial_pyramid_levels (
+ unsigned long levels
+ );
+ /*!
+ requires
+ - levels > 0
+ ensures
+ - #get_num_spatial_pyramid_levels() == levels
+ !*/
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ - is_loaded_with_image() == true
+ ensures
+ - Scans over all the candidate object locations as discussed in the WHAT
+ THIS OBJECT REPRESENTS section and stores all detections into #dets.
+ - for all valid i:
+ - #dets[i].second == The candidate object location which produced this
+ detection. This rectangle gives the location of the detection.
+ - #dets[i].first == The score for this detection. This value is equal
+ to dot(w, feature vector for this candidate object location).
+ - #dets[i].first >= thresh
+ - #dets will be sorted in descending order.
+              (i.e. #dets[i].first >= #dets[j].first for all valid i and all j > i)
+ - Elements of w beyond index get_num_dimensions()-1 are ignored. I.e. only
+ the first get_num_dimensions() are used.
+            - Note that no form of non-max suppression is performed. If a location
+              has a score >= thresh then it is reported in #dets.
+ !*/
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+ /*!
+ requires
+ - obj.num_parts() == 0
+ - is_loaded_with_image() == true
+ - psi.size() >= get_num_dimensions()
+ (i.e. psi must have preallocated its memory before this function is called)
+ ensures
+ - This function allows you to determine the feature vector used for a
+ candidate object location output from detect(). Note that this vector is
+ added to psi. Note also that you must use get_full_object_detection() to
+ convert a rectangle from detect() into the needed full_object_detection.
+ - The dimensionality of the vector added to psi is get_num_dimensions(). This
+ means that elements of psi after psi(get_num_dimensions()-1) are not modified.
+ - Since scan_image_boxes only searches a limited set of object locations,
+ not all possible rectangles can be output by detect(). So in the case
+ where obj.get_rect() could not arise from a call to detect(), this
+ function will map obj.get_rect() to the nearest possible rectangle and
+ then add the feature vector for the mapped rectangle into #psi.
+ - get_best_matching_rect(obj.get_rect()) == the rectangle obj.get_rect()
+ gets mapped to for feature extraction.
+ !*/
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+ /*!
+ ensures
+ - returns full_object_detection(rect)
+ (This function is here only for compatibility with the scan_image_pyramid
+ object)
+ !*/
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+ /*!
+ requires
+ - is_loaded_with_image() == true
+ ensures
+ - Since scan_image_boxes only searches a limited set of object locations,
+ not all possible rectangles can be represented. Therefore, this function
+ allows you to supply a rectangle and obtain the nearest possible
+ candidate object location rectangle.
+ !*/
+
+ unsigned long get_num_detection_templates (
+ ) const { return 1; }
+ /*!
+ ensures
+ - returns 1. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Notionally, its return value indicates
+ that a scan_image_boxes object is always ready to detect objects once
+ an image has been loaded.
+ !*/
+
+ unsigned long get_num_movable_components_per_detection_template (
+ ) const { return 0; }
+ /*!
+ ensures
+ - returns 0. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Its return value means that this object
+ does not support using movable part models.
+ !*/
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void serialize (
+ const scan_image_boxes<Feature_extractor_type,Box_generator>& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ template <
+ typename Feature_extractor_type,
+ typename Box_generator
+ >
+ void deserialize (
+ scan_image_boxes<Feature_extractor_type,Box_generator>& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMAGE_bOXES_ABSTRACT_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_custom.h b/ml/dlib/dlib/image_processing/scan_image_custom.h
new file mode 100644
index 000000000..29b969fca
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_custom.h
@@ -0,0 +1,401 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_IMAGE_CuSTOM_Hh_
+#define DLIB_SCAN_IMAGE_CuSTOM_Hh_
+
+#include "scan_image_custom_abstract.h"
+#include "../matrix.h"
+#include "../geometry.h"
+#include <vector>
+#include "../image_processing/full_object_detection.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ class scan_image_custom : noncopyable
+ {
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_image_custom (
+ );
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+
+ inline bool is_loaded_with_image (
+ ) const;
+
+ inline void copy_configuration(
+ const feature_extractor_type& fe
+ );
+
+ const Feature_extractor_type& get_feature_extractor (
+ ) const { return feats; }
+
+ inline void copy_configuration (
+ const scan_image_custom& item
+ );
+
+ inline long get_num_dimensions (
+ ) const;
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+
+ inline unsigned long get_num_detection_templates (
+ ) const { return 1; }
+
+ inline unsigned long get_num_movable_components_per_detection_template (
+ ) const { return 0; }
+
+ template <typename T>
+ friend void serialize (
+ const scan_image_custom<T>& item,
+ std::ostream& out
+ );
+
+ template <typename T>
+ friend void deserialize (
+ scan_image_custom<T>& item,
+ std::istream& in
+ );
+
+ private:
+ static bool compare_pair_rect (
+ const std::pair<double, rectangle>& a,
+ const std::pair<double, rectangle>& b
+ )
+ {
+ return a.first < b.first;
+ }
+
+
+ DLIB_MAKE_HAS_MEMBER_FUNCTION_TEST(
+ has_compute_object_score,
+ double,
+ compute_object_score,
+ ( const matrix<double,0,1>& w, const rectangle& obj) const
+ );
+
+ template <typename fe_type>
+ typename enable_if<has_compute_object_score<fe_type> >::type compute_all_rect_scores (
+ const fe_type& feats,
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ for (unsigned long i = 0; i < search_rects.size(); ++i)
+ {
+ const double score = feats.compute_object_score(w, search_rects[i]);
+ if (score >= thresh)
+ {
+ dets.push_back(std::make_pair(score, search_rects[i]));
+ }
+ }
+ }
+
+ template <typename fe_type>
+ typename disable_if<has_compute_object_score<fe_type> >::type compute_all_rect_scores (
+ const fe_type& feats,
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ matrix<double,0,1> psi(w.size());
+ psi = 0;
+ double prev_dot = 0;
+ for (unsigned long i = 0; i < search_rects.size(); ++i)
+ {
+ // Reset these back to zero every so often to avoid the accumulation of
+ // rounding error. Note that the only reason we do this loop in this
+ // complex way is to avoid needing to zero the psi vector every iteration.
+ if ((i%500) == 499)
+ {
+ psi = 0;
+ prev_dot = 0;
+ }
+
+ feats.get_feature_vector(search_rects[i], psi);
+ const double cur_dot = dot(psi, w);
+ const double score = cur_dot - prev_dot;
+ if (score >= thresh)
+ {
+ dets.push_back(std::make_pair(score, search_rects[i]));
+ }
+ prev_dot = cur_dot;
+ }
+ }
+
+
+ feature_extractor_type feats;
+ std::vector<rectangle> search_rects;
+ bool loaded_with_image;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void serialize (
+ const scan_image_custom<T>& item,
+ std::ostream& out
+ )
+ {
+ int version = 1;
+ serialize(version, out);
+ serialize(item.feats, out);
+ serialize(item.search_rects, out);
+ serialize(item.loaded_with_image, out);
+ serialize(item.get_num_dimensions(), out);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void deserialize (
+ scan_image_custom<T>& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unsupported version found when deserializing a scan_image_custom object.");
+
+ deserialize(item.feats, in);
+ deserialize(item.search_rects, in);
+ deserialize(item.loaded_with_image, in);
+
+ // When developing some feature extractor, it's easy to accidentally change its
+ // number of dimensions and then try to deserialize data from an older version of
+ // your extractor into the current code. This check is here to catch that kind of
+ // user error.
+ long dims;
+ deserialize(dims, in);
+ if (item.get_num_dimensions() != dims)
+ throw serialization_error("Number of dimensions in serialized scan_image_custom doesn't match the expected number.");
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// scan_image_custom member functions
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ scan_image_custom<Feature_extractor_type>::
+ scan_image_custom (
+ ) :
+ loaded_with_image(false)
+ {
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ template <
+ typename image_type
+ >
+ void scan_image_custom<Feature_extractor_type>::
+ load (
+ const image_type& img
+ )
+ {
+ feats.load(img, search_rects);
+ loaded_with_image = true;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ bool scan_image_custom<Feature_extractor_type>::
+ is_loaded_with_image (
+ ) const
+ {
+ return loaded_with_image;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ void scan_image_custom<Feature_extractor_type>::
+ copy_configuration(
+ const feature_extractor_type& fe
+ )
+ {
+ feats.copy_configuration(fe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ void scan_image_custom<Feature_extractor_type>::
+ copy_configuration (
+ const scan_image_custom& item
+ )
+ {
+ feats.copy_configuration(item.feats);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ long scan_image_custom<Feature_extractor_type>::
+ get_num_dimensions (
+ ) const
+ {
+ return feats.get_num_dimensions();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ void scan_image_custom<Feature_extractor_type>::
+ detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ w.size() >= get_num_dimensions(),
+ "\t void scan_image_custom::detect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t w.size(): " << w.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ dets.clear();
+        compute_all_rect_scores(feats, w, dets, thresh);
+ std::sort(dets.rbegin(), dets.rend(), compare_pair_rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ const rectangle scan_image_custom<Feature_extractor_type>::
+ get_best_matching_rect (
+ const rectangle& rect
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image(),
+ "\t const rectangle scan_image_custom::get_best_matching_rect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t this: " << this
+ );
+
+
+ double best_score = -1;
+ rectangle best_rect;
+ for (unsigned long i = 0; i < search_rects.size(); ++i)
+ {
+ const double score = (rect.intersect(search_rects[i])).area()/(double)(rect+search_rects[i]).area();
+ if (score > best_score)
+ {
+ best_score = score;
+ best_rect = search_rects[i];
+ }
+ }
+ return best_rect;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ full_object_detection scan_image_custom<Feature_extractor_type>::
+ get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& /*w*/
+ ) const
+ {
+ return full_object_detection(rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ void scan_image_custom<Feature_extractor_type>::
+ get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(is_loaded_with_image() &&
+ psi.size() >= get_num_dimensions() &&
+ obj.num_parts() == 0,
+ "\t void scan_image_custom::get_feature_vector()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t psi.size(): " << psi.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t obj.num_parts(): " << obj.num_parts()
+ << "\n\t this: " << this
+ );
+
+
+ feats.get_feature_vector(get_best_matching_rect(obj.get_rect()), psi);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMAGE_CuSTOM_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_custom_abstract.h b/ml/dlib/dlib/image_processing/scan_image_custom_abstract.h
new file mode 100644
index 000000000..ca3ba402a
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_custom_abstract.h
@@ -0,0 +1,390 @@
+// Copyright (C) 2013 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_IMAGE_CuSTOM_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_IMAGE_CuSTOM_ABSTRACT_Hh_
+
+#include <vector>
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../image_processing/full_object_detection_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class example_feature_extractor
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object defines the interface a feature extractor must implement if it
+ is to be used with the scan_image_custom object defined at the bottom of
+ this file.
+
+            In this case, the purpose of a feature extractor is to associate a
+ complete feature vector with each rectangle in an image. In particular,
+ each rectangle is scored by taking the dot product between this feature
+ vector and a weight vector. If this score is greater than a threshold then
+ the rectangle is output as a detection.
+ !*/
+
+ public:
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& image,
+ std::vector<rectangle>& candidate_objects
+ );
+ /*!
+ ensures
+ - Loads the given image into this feature extractor. This means that
+ subsequent calls to get_feature_vector() will return the feature vector
+ corresponding to locations in the image given to load().
+ - #candidate_objects == a set of bounding boxes in the given image that
+ might contain objects of interest. These are the locations that will be
+                  checked for the presence of objects when this feature extractor is used
+                  with the scan_image_custom object.
+ !*/
+
+ void copy_configuration (
+ const feature_extractor& item
+ );
+ /*!
+ ensures
+ - Copies all the state information of item into *this, except for state
+ information populated by load(). More precisely, given two
+ feature extractor objects S1 and S2, the following sequence of
+ instructions should always result in both of them having the exact same
+ state:
+ S2.copy_configuration(S1);
+ S1.load(img, temp);
+ S2.load(img, temp);
+ !*/
+
+ unsigned long get_num_dimensions (
+ ) const;
+ /*!
+ ensures
+ - returns the dimensionality of the feature vectors output by this object.
+ !*/
+
+ void get_feature_vector (
+ const rectangle& obj,
+ matrix<double,0,1>& psi
+ ) const;
+ /*!
+ requires
+ - psi.size() >= get_num_dimensions()
+ (i.e. psi must have preallocated its memory before this function is called)
+ ensures
+ - This function computes the feature vector associated with the given rectangle
+ in obj. This rectangle is interpreted as a bounding box within the last image
+ given to this->load() and a feature vector describing that bounding box is
+ output into psi.
+ - The feature vector is added into psi. That is, it does not overwrite the
+ previous contents of psi, but instead, it adds the vector to psi.
+ - The dimensionality of the vector added to psi is get_num_dimensions(). This
+ means that elements of psi after psi(get_num_dimensions()-1) are not modified.
+ - #psi.size() == psi.size()
+ (i.e. this function does not change the size of the psi vector)
+ !*/
+
+ double compute_object_score (
+ const matrix<double,0,1>& w,
+ const rectangle& obj
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ ensures
+ - This function returns the dot product between the feature vector for
+ object box obj and the given w vector. That is, this function computes
+ the same number as the following code snippet:
+ matrix<double,0,1> psi(w.size());
+ psi = 0;
+ get_feature_vector(obj, psi);
+ return dot(psi, w);
+ The point of the compute_object_score() routine is to compute this dot
+ product in a much more efficient way than directly calling
+ get_feature_vector() and dot(). Therefore, compute_object_score() is an
+ optional function. If you can't think of a faster way to compute these
+ scores then do not implement compute_object_score() and the
+ scan_image_custom object will simply compute these scores for you.
+ However, it is often the case that there is something clever you can do
+ to make this computation faster. If that is the case, then you can
+ provide an implementation of this function with your feature extractor
+ and then scan_image_custom will use it instead of using the default
+ calculation method shown in the above code snippet.
+ !*/
+
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ void serialize(
+ const feature_extractor& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ void deserialize(
+ feature_extractor& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Feature_extractor_type
+ >
+ class scan_image_custom : noncopyable
+ {
+ /*!
+ REQUIREMENTS ON Feature_extractor_type
+ - must be an object with an interface compatible with the
+ example_feature_extractor defined at the top of this file.
+
+ INITIAL VALUE
+ - is_loaded_with_image() == false
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for running a classifier over an image with the goal
+ of localizing each object present. The localization is in the form of the
+ bounding box around each object of interest.
+
+ Unlike the scan_image_pyramid and scan_image_boxes objects, this image
+ scanner delegates all the work of constructing the object feature vector to
+ its Feature_extractor_type template argument. That is, scan_image_custom
+ simply asks the supplied feature extractor what boxes in the image we
+ should investigate and then asks the feature extractor for the complete
+ feature vector for each box. That is, scan_image_custom does not apply any
+ kind of pyramiding or other higher level processing to the features coming
+ out of the feature extractor. That means that when you use
+ scan_image_custom it is completely up to you to define the feature vector
+ used with each image box.
+
+ THREAD SAFETY
+ Concurrent access to an instance of this object is not safe and should be
+ protected by a mutex lock except for the case where you are copying the
+ configuration (via copy_configuration()) of a scan_image_custom object to
+ many other threads. In this case, it is safe to copy the configuration of
+ a shared object so long as no other operations are performed on it.
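+
+            A minimal usage sketch, where my_feature_extractor stands for any type
+            implementing the example_feature_extractor interface above (the zero
+            weight vector and threshold are illustrative only):
+                scan_image_custom<my_feature_extractor> scanner;
+                scanner.load(img);  // the extractor also supplies the candidate boxes
+                matrix<double,0,1> w(scanner.get_num_dimensions());
+                w = 0;
+                std::vector<std::pair<double, rectangle> > dets;
+                scanner.detect(w, dets, 0);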
+ !*/
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_image_custom (
+ );
+ /*!
+ ensures
+ - this object is properly initialized
+ !*/
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+ /*!
+ requires
+ - image_type must be a type with the following properties:
+ - image_type objects can be loaded into Feature_extractor_type
+ objects via Feature_extractor_type::load().
+ ensures
+ - #is_loaded_with_image() == true
+ - Calls get_feature_extractor().load() on the given image. That is, we
+ will have loaded the image into the feature extractor in this
+ scan_image_custom object. We will also have stored the candidate
+ object locations generated by the feature extractor and will scan
+ over them when this->detect() is called.
+ - This object is ready to run a classifier over img to detect object
+ locations. Call detect() to do this.
+ !*/
+
+ bool is_loaded_with_image (
+ ) const;
+ /*!
+ ensures
+ - returns true if this object has been loaded with an image to process and
+ false otherwise.
+ !*/
+
+ const feature_extractor_type& get_feature_extractor (
+ ) const;
+ /*!
+ ensures
+ - returns a const reference to the feature_extractor_type object used
+ internally for local feature extraction.
+ !*/
+
+ void copy_configuration(
+ const feature_extractor_type& fe
+ );
+ /*!
+ ensures
+ - This function performs the equivalent of
+ get_feature_extractor().copy_configuration(fe) (i.e. this function allows
+ you to configure the parameters of the underlying feature extractor used
+ by a scan_image_custom object)
+ !*/
+
+ void copy_configuration (
+ const scan_image_custom& item
+ );
+ /*!
+ ensures
+ - Copies all the state information of item into *this, except for state
+ information populated by load(). More precisely, given two
+ scan_image_custom objects S1 and S2, the following sequence of
+ instructions should always result in both of them having the exact same
+ state:
+ S2.copy_configuration(S1);
+ S1.load(img);
+ S2.load(img);
+ !*/
+
+ long get_num_dimensions (
+ ) const;
+ /*!
+ ensures
+ - returns the number of dimensions in the feature vector for a candidate
+ object location. That is, this function returns get_feature_extractor().get_num_dimensions().
+ !*/
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ - is_loaded_with_image() == true
+ ensures
+ - Scans over all the candidate object locations produced by the feature
+ extractor during image loading and stores all detections into #dets.
+ - for all valid i:
+ - #dets[i].second == The candidate object location which produced this
+ detection. This rectangle gives the location of the detection.
+ - #dets[i].first == The score for this detection. This value is equal
+ to dot(w, feature vector for this candidate object location).
+ - #dets[i].first >= thresh
+            - #dets will be sorted in descending order.
+              (i.e. #dets[i].first >= #dets[j].first for all valid i and j with j > i)
+            - Elements of w beyond index get_num_dimensions()-1 are ignored.  I.e. only
+              the first get_num_dimensions() elements of w are used.
+            - Note that no form of non-max suppression is performed.  If a location
+              has a score >= thresh then it is reported in #dets.
+ !*/
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+ /*!
+ requires
+ - obj.num_parts() == 0
+ - is_loaded_with_image() == true
+ - psi.size() >= get_num_dimensions()
+ (i.e. psi must have preallocated its memory before this function is called)
+ ensures
+ - This function allows you to determine the feature vector used for a
+ candidate object location output from detect(). Note that this vector is
+ added to psi. Note also that you must use get_full_object_detection() to
+ convert a rectangle from detect() into the needed full_object_detection.
+ - The dimensionality of the vector added to psi is get_num_dimensions(). This
+ means that elements of psi after psi(get_num_dimensions()-1) are not modified.
+ - Since scan_image_custom only searches a limited set of object locations,
+ not all possible rectangles can be output by detect(). So in the case
+ where obj.get_rect() could not arise from a call to detect(), this
+ function will map obj.get_rect() to the nearest possible rectangle and
+ then add the feature vector for the mapped rectangle into #psi.
+ - get_best_matching_rect(obj.get_rect()) == the rectangle obj.get_rect()
+ gets mapped to for feature extraction.
+ !*/
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+ /*!
+ ensures
+ - returns full_object_detection(rect)
+ (This function is here only for compatibility with the scan_image_pyramid
+ object)
+ !*/
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+ /*!
+ requires
+ - is_loaded_with_image() == true
+ ensures
+ - Since scan_image_custom only searches a limited set of object locations,
+ not all possible rectangles can be represented. Therefore, this function
+ allows you to supply a rectangle and obtain the nearest possible
+ candidate object location rectangle.
+ !*/
+
+ unsigned long get_num_detection_templates (
+ ) const { return 1; }
+ /*!
+ ensures
+ - returns 1. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Notionally, its return value indicates
+ that a scan_image_custom object is always ready to detect objects once an
+ image has been loaded.
+ !*/
+
+ unsigned long get_num_movable_components_per_detection_template (
+ ) const { return 0; }
+ /*!
+ ensures
+ - returns 0. Note that this function is here only for compatibility with
+ the scan_image_pyramid object. Its return value means that this object
+ does not support using movable part models.
+ !*/
+
+ };
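+
+    // A minimal usage sketch (illustrative, not part of the dlib API: it assumes
+    // my_feature_extractor is a user-defined type satisfying the
+    // Feature_extractor_type requirements above and that w is a weight vector
+    // learned elsewhere, e.g. with a structural SVM trainer):
+    //
+    //     scan_image_custom<my_feature_extractor> scanner;
+    //     scanner.load(img);
+    //     std::vector<std::pair<double, rectangle> > dets;
+    //     scanner.detect(w, dets, 0);  // report every candidate scoring >= 0
+    //     // dets is now sorted by score, best detection first.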
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T>
+ void serialize (
+ const scan_image_custom<T>& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ template <typename T>
+ void deserialize (
+ scan_image_custom<T>& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMAGE_CuSTOM_ABSTRACT_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_pyramid.h b/ml/dlib/dlib/image_processing/scan_image_pyramid.h
new file mode 100644
index 000000000..455f1a649
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_pyramid.h
@@ -0,0 +1,1101 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_IMaGE_PYRAMID_Hh_
+#define DLIB_SCAN_IMaGE_PYRAMID_Hh_
+
+#include "scan_image_pyramid_abstract.h"
+#include "../matrix.h"
+#include "../geometry.h"
+#include "scan_image.h"
+#include "../array2d.h"
+#include <vector>
+#include "full_object_detection.h"
+#include "../image_processing/generic_image.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ class scan_image_pyramid : noncopyable
+ {
+
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+
+ typedef Pyramid_type pyramid_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_image_pyramid (
+ );
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+
+ inline bool is_loaded_with_image (
+ ) const;
+
+ inline void copy_configuration(
+ const feature_extractor_type& fe
+ );
+
+ inline void copy_configuration (
+ const scan_image_pyramid& item
+ );
+
+ const Feature_extractor_type& get_feature_extractor (
+ ) const { return feats_config; }
+
+ void add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions,
+ const std::vector<rectangle>& movable_feature_extraction_regions
+ );
+
+ void add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions
+ );
+
+ inline unsigned long get_num_detection_templates (
+ ) const;
+
+ inline unsigned long get_num_movable_components_per_detection_template (
+ ) const;
+
+ inline unsigned long get_num_stationary_components_per_detection_template (
+ ) const;
+
+ inline unsigned long get_num_components_per_detection_template (
+ ) const;
+
+ inline long get_num_dimensions (
+ ) const;
+
+ unsigned long get_max_pyramid_levels (
+ ) const;
+
+ void set_max_pyramid_levels (
+ unsigned long max_levels
+ );
+
+ inline unsigned long get_max_detections_per_template (
+ ) const;
+
+ void set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ );
+
+ inline unsigned long get_min_pyramid_layer_width (
+ ) const;
+
+ inline unsigned long get_min_pyramid_layer_height (
+ ) const;
+
+ void set_max_detections_per_template (
+ unsigned long max_dets
+ );
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+
+ template <typename T, typename U>
+ friend void serialize (
+ const scan_image_pyramid<T,U>& item,
+ std::ostream& out
+ );
+
+ template <typename T, typename U>
+ friend void deserialize (
+ scan_image_pyramid<T,U>& item,
+ std::istream& in
+ );
+
+ private:
+ static bool compare_pair_rect (
+ const std::pair<double, rectangle>& a,
+ const std::pair<double, rectangle>& b
+ )
+ {
+ return a.first < b.first;
+ }
+
+ struct detection_template
+ {
+ rectangle object_box; // always centered at (0,0)
+ std::vector<rectangle> rects; // template with respect to (0,0)
+ std::vector<rectangle> movable_rects;
+ };
+
+ friend void serialize(const detection_template& item, std::ostream& out)
+ {
+ int version = 1;
+ serialize(version, out);
+ serialize(item.object_box, out);
+ serialize(item.rects, out);
+ serialize(item.movable_rects, out);
+ }
+ friend void deserialize(detection_template& item, std::istream& in)
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unexpected version found while deserializing a dlib::scan_image_pyramid::detection_template object.");
+
+ deserialize(item.object_box, in);
+ deserialize(item.rects, in);
+ deserialize(item.movable_rects, in);
+ }
+
+ void get_mapped_rect_and_metadata (
+ const unsigned long number_pyramid_levels,
+ rectangle rect,
+ rectangle& mapped_rect,
+ detection_template& best_template,
+ rectangle& object_box,
+ unsigned long& best_level,
+ unsigned long& detection_template_idx
+ ) const;
+
+ double get_match_score (
+ rectangle r1,
+ rectangle r2
+ ) const
+ {
+ // make the rectangles overlap as much as possible before computing the match score.
+ r1 = move_rect(r1, r2.tl_corner());
+ return (r1.intersect(r2).area())/(double)(r1 + r2).area();
+ }
+
+ void test_coordinate_transforms()
+ {
+ for (long x = -10; x <= 10; x += 10)
+ {
+ for (long y = -10; y <= 10; y += 10)
+ {
+ const rectangle rect = centered_rect(x,y,5,6);
+ rectangle a;
+
+ a = feats_config.image_to_feat_space(rect);
+ if (a.width() > 10000000 || a.height() > 10000000 )
+ {
+ DLIB_CASSERT(false, "The image_to_feat_space() routine is outputting rectangles of an implausibly "
+ << "\nlarge size. This means there is probably a bug in your feature extractor.");
+ }
+ a = feats_config.feat_to_image_space(rect);
+ if (a.width() > 10000000 || a.height() > 10000000 )
+ {
+ DLIB_CASSERT(false, "The feat_to_image_space() routine is outputting rectangles of an implausibly "
+ << "\nlarge size. This means there is probably a bug in your feature extractor.");
+ }
+ }
+ }
+
+ }
+
+ feature_extractor_type feats_config; // just here to hold configuration. use it to populate the feats elements.
+ array<feature_extractor_type> feats;
+ std::vector<detection_template> det_templates;
+ unsigned long max_dets_per_template;
+ unsigned long max_pyramid_levels;
+ unsigned long min_pyramid_layer_width;
+ unsigned long min_pyramid_layer_height;
+
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void serialize (
+ const scan_image_pyramid<T,U>& item,
+ std::ostream& out
+ )
+ {
+ int version = 3;
+ serialize(version, out);
+ serialize(item.feats_config, out);
+ serialize(item.feats, out);
+ serialize(item.det_templates, out);
+ serialize(item.max_dets_per_template, out);
+ serialize(item.max_pyramid_levels, out);
+ serialize(item.min_pyramid_layer_width, out);
+ serialize(item.min_pyramid_layer_height, out);
+ serialize(item.get_num_dimensions(), out);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename T, typename U>
+ void deserialize (
+ scan_image_pyramid<T,U>& item,
+ std::istream& in
+ )
+ {
+ int version = 0;
+ deserialize(version, in);
+ if (version != 3)
+ throw serialization_error("Unsupported version found when deserializing a scan_image_pyramid object.");
+
+ deserialize(item.feats_config, in);
+ deserialize(item.feats, in);
+ deserialize(item.det_templates, in);
+ deserialize(item.max_dets_per_template, in);
+ deserialize(item.max_pyramid_levels, in);
+ deserialize(item.min_pyramid_layer_width, in);
+ deserialize(item.min_pyramid_layer_height, in);
+
+ // When developing some feature extractor, it's easy to accidentally change its
+ // number of dimensions and then try to deserialize data from an older version of
+ // your extractor into the current code. This check is here to catch that kind of
+ // user error.
+ long dims;
+ deserialize(dims, in);
+ if (item.get_num_dimensions() != dims)
+ throw serialization_error("Number of dimensions in serialized scan_image_pyramid doesn't match the expected number.");
+ }
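+
+    // A round-trip sketch (illustrative only; the file name is made up).  The
+    // version tag and the trailing dimension count written by serialize() make
+    // deserialize() throw a serialization_error on mismatched data rather than
+    // silently loading garbage:
+    //
+    //     std::ofstream fout("scanner.dat", std::ios::binary);
+    //     serialize(scanner, fout);
+    //     ...
+    //     std::ifstream fin("scanner.dat", std::ios::binary);
+    //     deserialize(scanner, fin);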
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// scan_image_pyramid member functions
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ scan_image_pyramid (
+ ) :
+ max_dets_per_template(10000),
+ max_pyramid_levels(1000),
+ min_pyramid_layer_width(20),
+ min_pyramid_layer_height(20)
+ {
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ template <
+ typename image_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ load (
+ const image_type& img
+ )
+ {
+ unsigned long levels = 0;
+ rectangle rect = get_rect(img);
+
+ // figure out how many pyramid levels we should be using based on the image size
+ pyramid_type pyr;
+ do
+ {
+ rect = pyr.rect_down(rect);
+ ++levels;
+ } while (rect.width() >= min_pyramid_layer_width && rect.height() >= min_pyramid_layer_height &&
+ levels < max_pyramid_levels);
+
+ if (feats.max_size() < levels)
+ feats.set_max_size(levels);
+ feats.set_size(levels);
+
+ for (unsigned long i = 0; i < feats.size(); ++i)
+ feats[i].copy_configuration(feats_config);
+
+ // build our feature pyramid
+ feats[0].load(img);
+ if (feats.size() > 1)
+ {
+ image_type temp1, temp2;
+ pyr(img, temp1);
+ feats[1].load(temp1);
+ swap(temp1,temp2);
+
+ for (unsigned long i = 2; i < feats.size(); ++i)
+ {
+ pyr(temp2, temp1);
+ feats[i].load(temp1);
+ swap(temp1,temp2);
+ }
+ }
+
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_max_detections_per_template (
+ ) const
+ {
+ return max_dets_per_template;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ set_max_detections_per_template (
+ unsigned long max_dets
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(max_dets > 0 ,
+ "\t void scan_image_pyramid::set_max_detections_per_template()"
+ << "\n\t The max number of possible detections can't be zero. "
+ << "\n\t max_dets: " << max_dets
+ << "\n\t this: " << this
+ );
+
+ max_dets_per_template = max_dets;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ bool scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ is_loaded_with_image (
+ ) const
+ {
+ return feats.size() != 0;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ copy_configuration(
+ const feature_extractor_type& fe
+ )
+ {
+ test_coordinate_transforms();
+ feats_config.copy_configuration(fe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ copy_configuration (
+ const scan_image_pyramid& item
+ )
+ {
+ feats_config.copy_configuration(item.feats_config);
+ det_templates = item.det_templates;
+ max_dets_per_template = item.max_dets_per_template;
+ max_pyramid_levels = item.max_pyramid_levels;
+ min_pyramid_layer_width = item.min_pyramid_layer_width;
+ min_pyramid_layer_height = item.min_pyramid_layer_height;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions,
+ const std::vector<rectangle>& movable_feature_extraction_regions
+ )
+ {
+#ifdef ENABLE_ASSERTS
+ // make sure requires clause is not broken
+ DLIB_ASSERT((get_num_detection_templates() == 0 ||
+ (get_num_stationary_components_per_detection_template() == stationary_feature_extraction_regions.size() &&
+ get_num_movable_components_per_detection_template() == movable_feature_extraction_regions.size())) &&
+ center(object_box) == point(0,0),
+ "\t void scan_image_pyramid::add_detection_template()"
+ << "\n\t The number of rects in this new detection template doesn't match "
+ << "\n\t the number in previous detection templates."
+ << "\n\t get_num_stationary_components_per_detection_template(): " << get_num_stationary_components_per_detection_template()
+ << "\n\t stationary_feature_extraction_regions.size(): " << stationary_feature_extraction_regions.size()
+ << "\n\t get_num_movable_components_per_detection_template(): " << get_num_movable_components_per_detection_template()
+ << "\n\t movable_feature_extraction_regions.size(): " << movable_feature_extraction_regions.size()
+ << "\n\t this: " << this
+ );
+
+ for (unsigned long i = 0; i < movable_feature_extraction_regions.size(); ++i)
+ {
+ DLIB_ASSERT(center(movable_feature_extraction_regions[i]) == point(0,0),
+ "Invalid inputs were given to this function."
+ << "\n\t center(movable_feature_extraction_regions["<<i<<"]): " << center(movable_feature_extraction_regions[i])
+ << "\n\t this: " << this
+ );
+ }
+#endif
+
+ detection_template temp;
+ temp.object_box = object_box;
+ temp.rects = stationary_feature_extraction_regions;
+ temp.movable_rects = movable_feature_extraction_regions;
+ det_templates.push_back(temp);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions
+ )
+ {
+ // an empty set of movable feature regions
+ const std::vector<rectangle> movable_feature_extraction_regions;
+ add_detection_template(object_box, stationary_feature_extraction_regions,
+ movable_feature_extraction_regions);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_num_detection_templates (
+ ) const
+ {
+ return det_templates.size();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_num_stationary_components_per_detection_template (
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 ,
+ "\t unsigned long scan_image_pyramid::get_num_stationary_components_per_detection_template()"
+ << "\n\t You need to give some detection templates before calling this function. "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t this: " << this
+ );
+
+ return det_templates[0].rects.size();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_num_movable_components_per_detection_template (
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 ,
+ "\t unsigned long scan_image_pyramid::get_num_movable_components_per_detection_template()"
+ << "\n\t You need to give some detection templates before calling this function. "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t this: " << this
+ );
+
+ return det_templates[0].movable_rects.size();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_num_components_per_detection_template (
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 ,
+ "\t unsigned long scan_image_pyramid::get_num_components_per_detection_template()"
+ << "\n\t You need to give some detection templates before calling this function. "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t this: " << this
+ );
+
+ return get_num_movable_components_per_detection_template() +
+ get_num_stationary_components_per_detection_template();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_num_dimensions (
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 ,
+ "\t long scan_image_pyramid::get_num_dimensions()"
+ << "\n\t You need to give some detection templates before calling this function. "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t this: " << this
+ );
+
+ return feats_config.get_num_dimensions()*get_num_components_per_detection_template() + get_num_detection_templates();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_max_pyramid_levels (
+ ) const
+ {
+ return max_pyramid_levels;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ set_max_pyramid_levels (
+ unsigned long max_levels
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(max_levels > 0 ,
+ "\t void scan_image_pyramid::set_max_pyramid_levels()"
+ << "\n\t You can't have zero levels. "
+ << "\n\t max_levels: " << max_levels
+ << "\n\t this: " << this
+ );
+
+ max_pyramid_levels = max_levels;
+
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 &&
+ is_loaded_with_image() &&
+ w.size() >= get_num_dimensions(),
+ "\t void scan_image_pyramid::detect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t w.size(): " << w.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t this: " << this
+ );
+
+ dets.clear();
+
+ array<array2d<double> > saliency_images;
+ saliency_images.set_max_size(get_num_components_per_detection_template());
+ saliency_images.set_size(get_num_components_per_detection_template());
+ std::vector<std::pair<unsigned int,rectangle> > stationary_region_rects(get_num_stationary_components_per_detection_template());
+ std::vector<std::pair<unsigned int,rectangle> > movable_region_rects(get_num_movable_components_per_detection_template());
+ pyramid_type pyr;
+ std::vector<std::pair<double, point> > point_dets;
+
+ // for all pyramid levels
+ for (unsigned long l = 0; l < feats.size(); ++l)
+ {
+ for (unsigned long i = 0; i < saliency_images.size(); ++i)
+ {
+ saliency_images[i].set_size(feats[l].nr(), feats[l].nc());
+ const unsigned long offset = get_num_detection_templates() + feats_config.get_num_dimensions()*i;
+
+ // build saliency images for pyramid level l
+ for (long r = 0; r < feats[l].nr(); ++r)
+ {
+ for (long c = 0; c < feats[l].nc(); ++c)
+ {
+ const typename feature_extractor_type::descriptor_type& descriptor = feats[l](r,c);
+
+ double sum = 0;
+ for (unsigned long k = 0; k < descriptor.size(); ++k)
+ {
+ sum += w(descriptor[k].first + offset)*descriptor[k].second;
+ }
+ saliency_images[i][r][c] = sum;
+ }
+ }
+ }
+
+ // now search the saliency images
+ for (unsigned long i = 0; i < det_templates.size(); ++i)
+ {
+ const point offset = -feats[l].image_to_feat_space(point(0,0));
+ for (unsigned long j = 0; j < stationary_region_rects.size(); ++j)
+ {
+ stationary_region_rects[j] = std::make_pair(j, translate_rect(feats[l].image_to_feat_space(det_templates[i].rects[j]),offset));
+ }
+ for (unsigned long j = 0; j < movable_region_rects.size(); ++j)
+ {
+ // Scale the size of the movable rectangle but make sure its center
+ // stays at point(0,0).
+ const rectangle temp = feats[l].image_to_feat_space(det_templates[i].movable_rects[j]);
+ movable_region_rects[j] = std::make_pair(j+stationary_region_rects.size(),
+ centered_rect(point(0,0),temp.width(), temp.height()));
+ }
+
+ // Scale the object box into the feature extraction image, but keeping it
+ // centered at point(0,0).
+ rectangle scaled_object_box = feats[l].image_to_feat_space(det_templates[i].object_box);
+ scaled_object_box = centered_rect(point(0,0),scaled_object_box.width(), scaled_object_box.height());
+
+ // Each detection template gets its own special threshold in addition to
+ // the global detection threshold. This allows us to model the fact that
+                // some detection templates might be more prone to false alarms, or might
+                // naturally require a larger or smaller threshold because they integrate
+                // over a larger or smaller region of the image.
+ const double template_specific_thresh = w(i);
+
+ scan_image_movable_parts(point_dets, saliency_images, scaled_object_box,
+ stationary_region_rects, movable_region_rects,
+ thresh+template_specific_thresh, max_dets_per_template);
+
+ // convert all the point detections into rectangles at the original image scale and coordinate system
+ for (unsigned long j = 0; j < point_dets.size(); ++j)
+ {
+ const double score = point_dets[j].first-template_specific_thresh;
+ point p = point_dets[j].second;
+ p = feats[l].feat_to_image_space(p);
+ rectangle rect = translate_rect(det_templates[i].object_box, p);
+ rect = pyr.rect_up(rect, l);
+
+ dets.push_back(std::make_pair(score, rect));
+ }
+ }
+ }
+
+ std::sort(dets.rbegin(), dets.rend(), compare_pair_rect);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ const rectangle scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_best_matching_rect (
+ const rectangle& rect
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 ,
+ "\t const rectangle scan_image_pyramid::get_best_matching_rect()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t this: " << this
+ );
+
+ rectangle mapped_rect, object_box;
+ detection_template best_template;
+ unsigned long best_level, junk;
+ get_mapped_rect_and_metadata(max_pyramid_levels, rect, mapped_rect, best_template, object_box, best_level, junk);
+ return mapped_rect;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_mapped_rect_and_metadata (
+ const unsigned long number_pyramid_levels,
+ rectangle rect,
+ rectangle& mapped_rect,
+ detection_template& best_template,
+ rectangle& object_box,
+ unsigned long& best_level,
+ unsigned long& detection_template_idx
+ ) const
+ {
+ pyramid_type pyr;
+ // Figure out the pyramid level which best matches rect against one of our
+ // detection template object boxes.
+ best_level = 0;
+ double best_match_score = -1;
+
+
+ // Find the best matching detection template for rect
+ for (unsigned long l = 0; l < number_pyramid_levels; ++l)
+ {
+ const rectangle temp = pyr.rect_down(rect,l);
+ if (temp.area() <= 1)
+ break;
+
+ // At this pyramid level, what matches best?
+ for (unsigned long t = 0; t < det_templates.size(); ++t)
+ {
+ const double match_score = get_match_score(det_templates[t].object_box, temp);
+ if (match_score > best_match_score)
+ {
+ best_match_score = match_score;
+ best_level = l;
+ best_template = det_templates[t];
+ detection_template_idx = t;
+ }
+ }
+ }
+
+
+ // Now we translate best_template into the right spot (it should be centered at the location
+ // determined by rect) and convert it into the feature image coordinate system.
+ rect = pyr.rect_down(rect,best_level);
+ const point offset = -feats_config.image_to_feat_space(point(0,0));
+ const point origin = feats_config.image_to_feat_space(center(rect)) + offset;
+ for (unsigned long k = 0; k < best_template.rects.size(); ++k)
+ {
+ rectangle temp = best_template.rects[k];
+ temp = feats_config.image_to_feat_space(temp);
+ temp = translate_rect(temp, origin);
+ best_template.rects[k] = temp;
+ }
+ for (unsigned long k = 0; k < best_template.movable_rects.size(); ++k)
+ {
+ rectangle temp = best_template.movable_rects[k];
+ temp = feats_config.image_to_feat_space(temp);
+ temp = centered_rect(point(0,0), temp.width(), temp.height());
+ best_template.movable_rects[k] = temp;
+ }
+
+ const rectangle scaled_object_box = feats_config.image_to_feat_space(best_template.object_box);
+ object_box = centered_rect(origin-offset, scaled_object_box.width(), scaled_object_box.height());
+
+ // The input rectangle was mapped to one of the detection templates. Reverse the process
+ // to figure out what the mapped rectangle is in the original input space.
+ mapped_rect = translate_rect(best_template.object_box, feats_config.feat_to_image_space(origin-offset));
+ mapped_rect = pyr.rect_up(mapped_rect, best_level);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ full_object_detection scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const
+ {
+ // fill in movable part positions.
+
+ rectangle mapped_rect;
+ detection_template best_template;
+ unsigned long best_level, junk;
+ rectangle object_box;
+ get_mapped_rect_and_metadata(feats.size(), rect, mapped_rect, best_template, object_box, best_level, junk);
+
+ Pyramid_type pyr;
+
+ array2d<double> saliency_image, sum_img;
+
+ double total_temp_score = 0;
+ // convert into feature space.
+ object_box = object_box.intersect(get_rect(feats[best_level]));
+
+ std::vector<point> movable_parts;
+ movable_parts.reserve(get_num_movable_components_per_detection_template());
+ for (unsigned long i = 0; i < get_num_movable_components_per_detection_template(); ++i)
+ {
+ // make the saliency_image for the ith movable part.
+
+ const rectangle part_rect = best_template.movable_rects[i];
+ const rectangle area = grow_rect(object_box,
+ part_rect.width()/2,
+ part_rect.height()/2).intersect(get_rect(feats[best_level]));
+
+ saliency_image.set_size(area.height(), area.width());
+ const unsigned long offset = get_num_detection_templates() + feats_config.get_num_dimensions()*(i+get_num_stationary_components_per_detection_template());
+
+ // build saliency image for pyramid level best_level
+ for (long r = area.top(); r <= area.bottom(); ++r)
+ {
+ for (long c = area.left(); c <= area.right(); ++c)
+ {
+ const typename feature_extractor_type::descriptor_type& descriptor = feats[best_level](r,c);
+
+ double sum = 0;
+ for (unsigned long k = 0; k < descriptor.size(); ++k)
+ {
+ sum += w(descriptor[k].first + offset)*descriptor[k].second;
+ }
+ saliency_image[r-area.top()][c-area.left()] = sum;
+ }
+ }
+
+ sum_img.set_size(saliency_image.nr(), saliency_image.nc());
+ sum_filter_assign(saliency_image, sum_img, part_rect);
+ // Figure out where the maximizer is in sum_img. Note that we
+ // only look in the part of sum_img that corresponds to a location inside
+ // object_box.
+ rectangle valid_area = get_rect(sum_img);
+ valid_area.left() += object_box.left() - area.left();
+ valid_area.top() += object_box.top() - area.top();
+ valid_area.right() += object_box.right() - area.right();
+ valid_area.bottom() += object_box.bottom() - area.bottom();
+ double max_val = 0;
+ point max_loc;
+ for (long r = valid_area.top(); r <= valid_area.bottom(); ++r)
+ {
+ for (long c = valid_area.left(); c <= valid_area.right(); ++c)
+ {
+                    if (sum_img[r][c] > max_val)
+                    {
+                        max_loc = point(c,r);
+                        max_val = sum_img[r][c];
+                    }
+ }
+ }
+
+ if (max_val <= 0)
+ {
+ max_loc = OBJECT_PART_NOT_PRESENT;
+ }
+ else
+ {
+ total_temp_score += max_val;
+ // convert max_loc back into feature image space from our cropped image.
+ max_loc += area.tl_corner();
+
+ // now convert from feature space to image space.
+ max_loc = feats[best_level].feat_to_image_space(max_loc);
+ max_loc = pyr.point_up(max_loc, best_level);
+ max_loc = nearest_point(rect, max_loc);
+ }
+
+ movable_parts.push_back(max_loc);
+ }
+
+ return full_object_detection(rect, movable_parts);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(get_num_detection_templates() > 0 &&
+ is_loaded_with_image() &&
+ psi.size() >= get_num_dimensions() &&
+ obj.num_parts() == get_num_movable_components_per_detection_template(),
+ "\t void scan_image_pyramid::get_feature_vector()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t get_num_detection_templates(): " << get_num_detection_templates()
+ << "\n\t is_loaded_with_image(): " << is_loaded_with_image()
+ << "\n\t psi.size(): " << psi.size()
+ << "\n\t get_num_dimensions(): " << get_num_dimensions()
+ << "\n\t get_num_movable_components_per_detection_template(): " << get_num_movable_components_per_detection_template()
+ << "\n\t obj.num_parts(): " << obj.num_parts()
+ << "\n\t this: " << this
+ );
+ DLIB_ASSERT(all_parts_in_rect(obj),
+ "\t void scan_image_pyramid::get_feature_vector()"
+ << "\n\t Invalid inputs were given to this function "
+ << "\n\t obj.get_rect(): " << obj.get_rect()
+ << "\n\t this: " << this
+ );
+
+ rectangle mapped_rect;
+ detection_template best_template;
+ unsigned long best_level, detection_template_idx;
+ rectangle object_box;
+ get_mapped_rect_and_metadata(feats.size(), obj.get_rect(), mapped_rect, best_template, object_box, best_level, detection_template_idx);
+
+ psi(detection_template_idx) -= 1;
+
+ Pyramid_type pyr;
+
+ // put the movable rects at the places indicated by obj.
+ std::vector<rectangle> rects = best_template.rects;
+ for (unsigned long i = 0; i < obj.num_parts(); ++i)
+ {
+ if (obj.part(i) != OBJECT_PART_NOT_PRESENT)
+ {
+ // map from the original image to scaled feature space.
+ point loc = feats[best_level].image_to_feat_space(pyr.point_down(obj.part(i), best_level));
+ // Make sure the movable part always stays within the object_box.
+ // Otherwise it would be at a place that the detect() function can never
+ // look.
+ loc = nearest_point(object_box, loc);
+ rects.push_back(translate_rect(best_template.movable_rects[i], loc));
+ }
+ else
+ {
+ // add an empty rectangle since this part wasn't observed.
+ rects.push_back(rectangle());
+ }
+ }
+
+ // pull features out of all the boxes in rects.
+ for (unsigned long j = 0; j < rects.size(); ++j)
+ {
+ const rectangle rect = rects[j].intersect(get_rect(feats[best_level]));
+ const unsigned long template_region_id = j;
+ const unsigned long offset = get_num_detection_templates() + feats_config.get_num_dimensions()*template_region_id;
+ for (long r = rect.top(); r <= rect.bottom(); ++r)
+ {
+ for (long c = rect.left(); c <= rect.right(); ++c)
+ {
+ const typename feature_extractor_type::descriptor_type& descriptor = feats[best_level](r,c);
+ for (unsigned long k = 0; k < descriptor.size(); ++k)
+ {
+ psi(descriptor[k].first + offset) += descriptor[k].second;
+ }
+ }
+ }
+ }
+
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(width > 0 && height > 0 ,
+ "\t void scan_image_pyramid::set_min_pyramid_layer_size()"
+ << "\n\t These sizes can't be zero. "
+ << "\n\t width: " << width
+ << "\n\t height: " << height
+ << "\n\t this: " << this
+ );
+
+ min_pyramid_layer_width = width;
+ min_pyramid_layer_height = height;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_min_pyramid_layer_width (
+ ) const
+ {
+ return min_pyramid_layer_width;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ unsigned long scan_image_pyramid<Pyramid_type,Feature_extractor_type>::
+ get_min_pyramid_layer_height (
+ ) const
+ {
+ return min_pyramid_layer_height;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMaGE_PYRAMID_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_pyramid_abstract.h b/ml/dlib/dlib/image_processing/scan_image_pyramid_abstract.h
new file mode 100644
index 000000000..e985a3f32
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_pyramid_abstract.h
@@ -0,0 +1,495 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_IMaGE_PYRAMID_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_IMaGE_PYRAMID_ABSTRACT_Hh_
+
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../image_processing.h"
+#include "../array2d.h"
+#include <vector>
+#include "full_object_detection_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ class scan_image_pyramid : noncopyable
+ {
+ /*!
+ REQUIREMENTS ON Pyramid_type
+ - must be one of the pyramid_down objects defined in
+ dlib/image_transforms/image_pyramid_abstract.h or an object with
+ a compatible interface
+
+ REQUIREMENTS ON Feature_extractor_type
+ - must be an object with an interface compatible with the hashed_feature_image
+ object defined in dlib/image_keypoint/hashed_feature_image_abstract.h or
+ with the nearest_neighbor_feature_image object defined in
+ dlib/image_keypoint/nearest_neighbor_feature_image_abstract.h
+
+ INITIAL VALUE
+ - get_num_detection_templates() == 0
+ - is_loaded_with_image() == false
+ - get_max_detections_per_template() == 10000
+ - get_max_pyramid_levels() == 1000
+ - get_min_pyramid_layer_width() == 20
+ - get_min_pyramid_layer_height() == 20
+
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for running a sliding window classifier over
+ an image pyramid. This object can also be understood as a general
+ tool for implementing the spatial pyramid models described in the paper:
+ Beyond Bags of Features: Spatial Pyramid Matching for Recognizing
+ Natural Scene Categories by Svetlana Lazebnik, Cordelia Schmid,
+ and Jean Ponce
+ It also includes the ability to represent movable part models.
+
+ The sliding window classifiers used by this object have three parts:
+ 1. The underlying feature extraction provided by Feature_extractor_type
+ objects, which associate a vector with each location in an image.
+
+ 2. A detection template. This is a rectangle which defines the shape of a
+ sliding window (i.e. the object_box), as well as a set of rectangular feature
+ extraction regions inside it. This set of regions defines the spatial
+ structure of the overall feature extraction within a sliding window. In
+ particular, each location of a sliding window has a feature vector
+ associated with it. This feature vector is defined as follows:
+ - Let N denote the number of feature extraction zones.
+ - Let M denote the dimensionality of the vectors output by Feature_extractor_type
+ objects.
+ - Let F(i) == the M dimensional vector which is the sum of all vectors
+ given by our Feature_extractor_type object inside the i-th feature extraction
+ zone.
+ - Then the feature vector for a sliding window is an M*N dimensional vector
+ [F(1) F(2) F(3) ... F(N)] (i.e. it is a concatenation of the N vectors).
+ This feature vector can be thought of as a collection of N "bags of features",
+ each bag coming from a spatial location determined by one of the rectangular
+ feature extraction zones.
+
+ 3. A weight vector and a threshold value. The dot product between the weight
+ vector and the feature vector for a sliding window location gives the score
+ of the window. If this score is greater than the threshold value then the
+ window location is output as a detection.
+
+ Finally, the sliding window classifiers described above are applied to every level of
+ an image pyramid. Moreover, some of the feature extraction zones are allowed to move
+ freely within the object box. This means that when we are sliding the classifier over
+ an image, some feature extraction zones are stationary (i.e. always in the same place
+ relative to the object box) while others are allowed to move anywhere within the object
+ box. In particular, the movable regions are placed at the locations that maximize the
+ score of the classifier. Note further that each of the movable feature extraction
+ zones must pass a threshold test for it to be included. That is, if the score that a
+ movable zone would contribute to the overall score for a sliding window location is not
+ positive then that zone is not included in the feature vector (i.e. its part of the
+ feature vector is set to zero. This way the length of the feature vector stays
+ constant). This movable region construction allows us to represent objects with parts
+ that move around relative to the object box. For example, a human has hands but they
+ aren't always in the same place relative to a person's bounding box.
+
+ THREAD SAFETY
+ Concurrent access to an instance of this object is not safe and should be protected
+ by a mutex lock except for the case where you are copying the configuration
+ (via copy_configuration()) of a scan_image_pyramid object to many other threads.
+ In this case, it is safe to copy the configuration of a shared object so long
+ as no other operations are performed on it.
+ !*/
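+
+        // An end-to-end sketch (my_pyramid and my_extractor are illustrative
+        // placeholder types satisfying the requirements above; regions, img, w, and
+        // thresh are assumed to be set up by the caller):
+        //
+        //     scan_image_pyramid<my_pyramid, my_extractor> scanner;
+        //     scanner.add_detection_template(centered_rect(point(0,0),80,80), regions);
+        //     scanner.load(img);
+        //     std::vector<std::pair<double, rectangle> > dets;
+        //     scanner.detect(w, dets, thresh);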
+ public:
+
+ typedef matrix<double,0,1> feature_vector_type;
+
+ typedef Pyramid_type pyramid_type;
+ typedef Feature_extractor_type feature_extractor_type;
+
+ scan_image_pyramid (
+ );
+ /*!
+ ensures
+ - this object is properly initialized
+ !*/
+
+ template <
+ typename image_type
+ >
+ void load (
+ const image_type& img
+ );
+ /*!
+ requires
+ - image_type must be a type with the following properties:
+ - image_type is default constructable.
+ - image_type is swappable by the global swap() function.
+ - image_type logically represents some kind of image and therefore its
+ number of rows and columns can be queried via num_rows(img) and
+ num_columns(img) respectively.
+ - image_type objects can be loaded into Feature_extractor_type
+ objects via Feature_extractor_type::load().
+ - image_type objects can be used with Pyramid_type. That is,
+ if pyr is an object of type Pyramid_type while img1 and img2
+ are objects of image_type, then pyr(img1,img2) should be
+ a valid expression which downsamples img1 into img2.
+ ensures
+ - #is_loaded_with_image() == true
+ - This object is ready to run sliding window classifiers over img. Call
+ detect() to do this.
+ !*/
+
+ bool is_loaded_with_image (
+ ) const;
+ /*!
+ ensures
+ - returns true if this object has been loaded with an image to process
+ and false otherwise.
+ !*/
+
+ const feature_extractor_type& get_feature_extractor (
+ ) const;
+ /*!
+ ensures
+ - returns a const reference to the feature_extractor_type object used
+ internally for local feature extraction.
+ !*/
+
+ void copy_configuration(
+ const feature_extractor_type& fe
+ );
+ /*!
+ ensures
+ - This function performs the equivalent of
+ get_feature_extractor().copy_configuration(fe) (i.e. this function allows
+ you to configure the parameters of the underlying feature extractor used
+ by a scan_image_pyramid object)
+ !*/
+
+ void copy_configuration (
+ const scan_image_pyramid& item
+ );
+ /*!
+ ensures
+ - copies all the state information of item into *this, except for state
+ information populated by load(). More precisely, given two scan_image_pyramid
+ objects S1 and S2, the following sequence of instructions should always
+ result in both of them having the exact same state.
+ S2.copy_configuration(S1);
+ S1.load(img);
+ S2.load(img);
+ !*/
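+
+        // A threading sketch (illustrative; thread creation is omitted).  Each worker
+        // thread can take its own copy of a shared, configured scanner and then call
+        // load()/detect() independently:
+        //
+        //     scan_image_pyramid<pyramid_type,feature_extractor_type> my_scanner;
+        //     my_scanner.copy_configuration(shared_scanner);  // safe to do concurrently
+        //     my_scanner.load(my_img);                        // per-thread state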
+
+ void add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions,
+ const std::vector<rectangle>& movable_feature_extraction_regions
+ );
+ /*!
+ requires
+ - center(object_box) == point(0,0)
+ - for all valid i:
+ - center(movable_feature_extraction_regions[i]) == point(0,0)
+ - if (get_num_detection_templates() > 0) then
+ - get_num_stationary_components_per_detection_template() == stationary_feature_extraction_regions.size()
+ - get_num_movable_components_per_detection_template() == movable_feature_extraction_regions.size()
+ (i.e. if you already have detection templates in this object, then
+ any new detection template must declare a consistent number of
+ feature extraction regions)
+ ensures
+ - Adds another detection template to this object. In particular, object_box
+ defines the size and shape of a sliding window while stationary_feature_extraction_regions
+ and movable_feature_extraction_regions defines the locations for feature extraction as
+ discussed in the WHAT THIS OBJECT REPRESENTS section above. Note also that the locations of
+ the stationary feature extraction regions are relative to the object_box.
+ - #get_num_detection_templates() == get_num_detection_templates() + 1
+ - The order of rectangles in stationary_feature_extraction_regions and
+ movable_feature_extraction_regions matters. Recall that each rectangle
+ gets its own set of features. So given two different templates, their
+ i-th rectangles will both share the same part of the weight vector (i.e. the w
+ supplied to detect()). So there should be some reasonable correspondence
+              between the rectangle ordering in different detection templates.  For
+              example, different detection templates should place corresponding feature
+              extraction regions in roughly the same part of the object_box.
+            - #get_num_stationary_components_per_detection_template() == stationary_feature_extraction_regions.size()
+            - #get_num_movable_components_per_detection_template() == movable_feature_extraction_regions.size()
+ !*/
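+
+        // For example, a minimal sketch (sizes are illustrative) defining a single
+        // 80x80 detection template whose stationary feature extraction zones are the
+        // four quadrants of the object box:
+        //
+        //     const rectangle object_box = centered_rect(point(0,0), 80, 80);
+        //     std::vector<rectangle> regions;
+        //     regions.push_back(rectangle(-40,-40,  0,  0));  // top-left quadrant
+        //     regions.push_back(rectangle(  0,-40, 40,  0));  // top-right quadrant
+        //     regions.push_back(rectangle(-40,  0,  0, 40));  // bottom-left quadrant
+        //     regions.push_back(rectangle(  0,  0, 40, 40));  // bottom-right quadrant
+        //     scanner.add_detection_template(object_box, regions);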
+
+ void add_detection_template (
+ const rectangle& object_box,
+ const std::vector<rectangle>& stationary_feature_extraction_regions
+ );
+ /*!
+ ensures
+ - calls add_detection_template(object_box, stationary_feature_extraction_regions, empty_list)
+ where empty_list is a vector of size 0. I.e. this function is just a convenience
+ routine for adding detection templates with no movable regions.
+ !*/
+
+ unsigned long get_num_detection_templates (
+ ) const;
+ /*!
+ ensures
+ - returns the number of detection templates in this object
+ !*/
+
+ unsigned long get_num_stationary_components_per_detection_template (
+ ) const;
+ /*!
+ requires
+ - get_num_detection_templates() > 0
+ ensures
+ - A detection template is a rectangle which defines the shape of a sliding
+ window (the object_box), as well as a set of rectangles which define
+ feature extraction zones. This function returns the number of stationary
+ feature extraction zones in the detection templates used by this object.
+ !*/
+
+ unsigned long get_num_movable_components_per_detection_template (
+ ) const;
+ /*!
+ requires
+ - get_num_detection_templates() > 0
+ ensures
+ - A detection template is a rectangle which defines the shape of a sliding
+ window (the object_box), as well as a set of rectangles which define
+ feature extraction zones. This function returns the number of movable
+ feature extraction zones in the detection templates used by this object.
+ !*/
+
+ unsigned long get_num_components_per_detection_template (
+ ) const;
+ /*!
+ requires
+ - get_num_detection_templates() > 0
+ ensures
+ - returns the total number of feature extraction zones in the detection
+ templates used by this object. That is, returns the following:
+ - get_num_movable_components_per_detection_template() +
+ get_num_stationary_components_per_detection_template()
+ !*/
+
+ long get_num_dimensions (
+ ) const;
+ /*!
+ requires
+ - get_num_detection_templates() > 0
+ ensures
+            - returns the number of dimensions in the feature vector for a sliding window
+              location.  This value is the dimensionality of the underlying feature vectors
+              produced by Feature_extractor_type times (get_num_stationary_components_per_detection_template() +
+              get_num_movable_components_per_detection_template()), plus
+              get_num_detection_templates() extra dimensions, one per detection template,
+              which hold the template-specific threshold terms used by detect().
+ !*/
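+
+        // A worked example (numbers are illustrative): with 2 detection templates,
+        // each declaring 4 stationary and 1 movable feature extraction zones, and a
+        // feature extractor whose get_num_dimensions() == 1000, this returns
+        // 1000*(4+1) + 2 == 5002.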
+
+ unsigned long get_max_pyramid_levels (
+ ) const;
+ /*!
+ ensures
+ - returns the maximum number of image pyramid levels this object will use.
+ Note that #get_max_pyramid_levels() == 1 indicates that no image pyramid
+ will be used at all. That is, only the original image will be processed
+ and no lower scale versions will be created.
+ !*/
+
+ void set_max_pyramid_levels (
+ unsigned long max_levels
+ );
+ /*!
+ requires
+ - max_levels > 0
+ ensures
+ - #get_max_pyramid_levels() == max_levels
+ !*/
+
+ void set_min_pyramid_layer_size (
+ unsigned long width,
+ unsigned long height
+ );
+ /*!
+ requires
+ - width > 0
+ - height > 0
+ ensures
+ - #get_min_pyramid_layer_width() == width
+ - #get_min_pyramid_layer_height() == height
+ !*/
+
+ inline unsigned long get_min_pyramid_layer_width (
+ ) const;
+ /*!
+ ensures
+ - returns the smallest allowable width of an image in the image pyramid.
+ All pyramids will always include the original input image, however, no
+ pyramid levels will be created which have a width smaller than the
+ value returned by this function.
+ !*/
+
+ inline unsigned long get_min_pyramid_layer_height (
+ ) const;
+ /*!
+ ensures
+ - returns the smallest allowable height of an image in the image pyramid.
+ All pyramids will always include the original input image, however, no
+ pyramid levels will be created which have a height smaller than the
+ value returned by this function.
+ !*/
+
+ unsigned long get_max_detections_per_template (
+ ) const;
+ /*!
+ ensures
+ - For each image pyramid layer and detection template, this object scans a sliding
+ window classifier over an image and produces a number of detections. This
+ function returns a number which defines a hard upper limit on the number of
+ detections allowed by a single scan. This means that the total number of
+ possible detections produced by detect() is get_max_detections_per_template()*
+ get_num_detection_templates()*(number of image pyramid layers). Additionally,
+ if the maximum number of detections is reached during a scan then this object
+ will return a random subsample of all detections which are above the detection
+ threshold.
+ !*/
+
+ void set_max_detections_per_template (
+ unsigned long max_dets
+ );
+ /*!
+ requires
+ - max_dets > 0
+ ensures
+ - #get_max_detections_per_template() == max_dets
+ !*/
+
+ void detect (
+ const feature_vector_type& w,
+ std::vector<std::pair<double, rectangle> >& dets,
+ const double thresh
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ - is_loaded_with_image() == true
+ - get_num_detection_templates() > 0
+ ensures
+ - Scans all the detection templates over all pyramid layers as discussed in the
+ WHAT THIS OBJECT REPRESENTS section and stores all detections into #dets.
+ - for all valid i:
+ - #dets[i].second == The object box which produced this detection. This rectangle gives
+ the location of the detection. Note that the rectangle will have been converted back into
+ the original image input space. That is, if this detection was made at a low level in the
+ image pyramid then the object box will have been automatically mapped up the pyramid layers
+ to the original image space. Or in other words, if you plot #dets[i].second on top of the
+ image given to load() it will show up in the right place.
+ - #dets[i].first == The score for this detection. This value is equal to dot(w, feature vector
+ for this sliding window location).
+ - #dets[i].first >= thresh
+            - #dets will be sorted in descending order.  (i.e. #dets[i].first >= #dets[j].first for all valid i and j with j > i)
+            - Elements of w beyond index get_num_dimensions()-1 are ignored.  I.e. only the first
+              get_num_dimensions() elements of w are used.
+ - Note that no form of non-max suppression is performed. If a window has a score >= thresh
+ then it is reported in #dets (assuming the limit imposed by get_max_detections_per_template() hasn't
+ been reached).
+ !*/
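+
+        // A usage sketch (w is a learned weight vector; the threshold of 0 is
+        // illustrative):
+        //
+        //     std::vector<std::pair<double, rectangle> > dets;
+        //     scanner.detect(w, dets, 0);
+        //     for (unsigned long i = 0; i < dets.size(); ++i)
+        //         cout << "score: " << dets[i].first << "  box: " << dets[i].second << endl;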
+
+ const rectangle get_best_matching_rect (
+ const rectangle& rect
+ ) const;
+ /*!
+ requires
+ - get_num_detection_templates() > 0
+ ensures
+ - Since scan_image_pyramid is a sliding window classifier system, not all possible rectangles
+ can be represented. Therefore, this function allows you to supply a rectangle and obtain the
+ nearest possible sliding window rectangle.
+ !*/
+
+ void get_feature_vector (
+ const full_object_detection& obj,
+ feature_vector_type& psi
+ ) const;
+ /*!
+ requires
+ - all_parts_in_rect(obj) == true
+ - obj.num_parts() == get_num_movable_components_per_detection_template()
+ - is_loaded_with_image() == true
+ - get_num_detection_templates() > 0
+ - psi.size() >= get_num_dimensions()
+ (i.e. psi must have preallocated its memory before this function is called)
+ ensures
+ - This function allows you to determine the feature vector used for a
+ sliding window location. Note that this vector is added to psi. Note
+ also that you must use get_full_object_detection() to convert a rect from
+ detect() into the needed full_object_detection.
+ - The dimensionality of the vector added to psi is get_num_dimensions(). This
+ means that elements of psi after psi(get_num_dimensions()-1) are not modified.
+ - Since scan_image_pyramid is a sliding window classifier system, not all
+ possible rectangles can be output by detect(). So in the case where
+ obj.get_rect() could not arise from a call to detect(), this function
+ will map obj.get_rect() to the nearest possible object box and then add
+ the feature vector for the mapped rectangle into #psi.
+ - get_best_matching_rect(obj.get_rect()) == the rectangle obj.get_rect()
+ gets mapped to for feature extraction.
+ !*/
+
+ full_object_detection get_full_object_detection (
+ const rectangle& rect,
+ const feature_vector_type& w
+ ) const;
+ /*!
+ requires
+ - w.size() >= get_num_dimensions()
+ - is_loaded_with_image() == true
+ - get_num_detection_templates() > 0
+ ensures
+ - This function allows you to determine the full_object_detection
+ corresponding to a sliding window location. Note that the detect()
+ routine doesn't return the locations of the movable parts in a detected
+ object. Therefore, if you are using any movable parts in your model you
+ must use get_full_object_detection() to find out where the movable parts
+ were detected. To do this, you supply the w and detected rectangle.
+ Then the corresponding fully populated full_object_detection will be
+ returned.
+ - returns a full_object_detection, OBJ, such that:
+ - OBJ.get_rect() == rect
+ - OBJ.num_parts() == get_num_movable_components_per_detection_template()
+ - OBJ.part(i) == the location of the i-th movable part inside this detection,
+ or OBJECT_PART_NOT_PRESENT if the part was not found.
+ !*/
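+
+ // Illustrative sketch (hypothetical names): recovering the movable part
+ // locations for a box returned by detect():
+ //
+ // full_object_detection obj = scanner.get_full_object_detection(dets[0].second, w);
+ // for (unsigned long i = 0; i < obj.num_parts(); ++i)
+ // {
+ // if (obj.part(i) != OBJECT_PART_NOT_PRESENT)
+ // std::cout << "part " << i << " at " << obj.part(i) << std::endl;
+ // }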
+
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void serialize (
+ const scan_image_pyramid<Pyramid_type,Feature_extractor_type>& item,
+ std::ostream& out
+ );
+ /*!
+ provides serialization support
+ !*/
+
+ template <
+ typename Pyramid_type,
+ typename Feature_extractor_type
+ >
+ void deserialize (
+ scan_image_pyramid<Pyramid_type,Feature_extractor_type>& item,
+ std::istream& in
+ );
+ /*!
+ provides deserialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMaGE_PYRAMID_ABSTRACT_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_pyramid_tools.h b/ml/dlib/dlib/image_processing/scan_image_pyramid_tools.h
new file mode 100644
index 000000000..874b995b4
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_pyramid_tools.h
@@ -0,0 +1,180 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SCAN_IMaGE_PYRAMID_TOOLS_Hh_
+#define DLIB_SCAN_IMaGE_PYRAMID_TOOLS_Hh_
+
+#include "scan_image_pyramid_tools_abstract.h"
+#include "../statistics.h"
+#include <list>
+#include "../geometry.h"
+#include <iostream>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ inline bool compare_first (
+ const std::pair<unsigned long,rectangle>& a,
+ const std::pair<unsigned long,rectangle>& b
+ )
+ {
+ return a.first < b.first;
+ }
+ }
+
+
+ template <typename image_scanner_type>
+ std::vector<rectangle> determine_object_boxes (
+ const image_scanner_type& scanner,
+ const std::vector<rectangle>& rects,
+ double min_match_score
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < min_match_score && min_match_score <= 1,
+ "\t std::vector<rectangle> determine_object_boxes()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t min_match_score: " << min_match_score
+ );
+
+ typename image_scanner_type::pyramid_type pyr;
+
+ typedef std::list<std::pair<unsigned long, rectangle> > list_type;
+
+ unsigned long max_area = 0;
+
+ // Copy rects into sorted_rects and sort them in order of increasing area. But
+ // only include the rectangles that aren't already obtainable by the scanner.
+ list_type sorted_rects;
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ if (scanner.get_num_detection_templates() > 0)
+ {
+ rectangle temp = scanner.get_best_matching_rect(rects[i]);
+ const double match_score = (rects[i].intersect(temp).area())/(double)(rects[i] + temp).area();
+ // skip this rectangle if it's already matched well enough.
+ if (match_score > min_match_score)
+ continue;
+ }
+ max_area = std::max(rects[i].area(), max_area);
+ sorted_rects.push_back(std::make_pair(rects[i].area(), rects[i]));
+ }
+ sorted_rects.sort(dlib::impl::compare_first);
+
+ // Make sure this area value is comfortably larger than all the
+ // rectangles' areas.
+ max_area = 3*max_area + 100;
+
+ std::vector<rectangle> object_boxes;
+
+ while (sorted_rects.size() != 0)
+ {
+ rectangle cur = sorted_rects.front().second;
+ sorted_rects.pop_front();
+ object_boxes.push_back(centered_rect(point(0,0), cur.width(), cur.height()));
+
+ // Scale cur up the image pyramid and remove any rectangles which match.
+ // But also stop when cur gets large enough to not match anything.
+ for (unsigned long itr = 0;
+ itr < scanner.get_max_pyramid_levels() && cur.area() < max_area;
+ ++itr)
+ {
+ list_type::iterator i = sorted_rects.begin();
+ while (i != sorted_rects.end())
+ {
+ const rectangle temp = move_rect(i->second, cur.tl_corner());
+ const double match_score = (cur.intersect(temp).area())/(double)(cur + temp).area();
+ if (match_score > min_match_score)
+ {
+ i = sorted_rects.erase(i);
+ }
+ else
+ {
+ ++i;
+ }
+ }
+
+ cur = pyr.rect_up(cur);
+ }
+
+ }
+
+ return object_boxes;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_scanner_type>
+ std::vector<rectangle> determine_object_boxes (
+ const image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ double min_match_score
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < min_match_score && min_match_score <= 1,
+ "\t std::vector<rectangle> determine_object_boxes()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t min_match_score: " << min_match_score
+ );
+
+ std::vector<rectangle> temp;
+ for (unsigned long i = 0; i < rects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < rects[i].size(); ++j)
+ {
+ temp.push_back(rects[i][j]);
+ }
+ }
+
+ return determine_object_boxes(scanner, temp, min_match_score);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_scanner_type>
+ void setup_grid_detection_templates (
+ image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ unsigned int cells_x,
+ unsigned int cells_y,
+ double min_match_score = 0.75
+ )
+ {
+ const std::vector<rectangle>& object_boxes = determine_object_boxes(scanner, rects, min_match_score);
+ for (unsigned long i = 0; i < object_boxes.size(); ++i)
+ {
+ scanner.add_detection_template(object_boxes[i], create_grid_detection_template(object_boxes[i], cells_x, cells_y));
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <typename image_scanner_type>
+ void setup_grid_detection_templates_verbose (
+ image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ unsigned int cells_x,
+ unsigned int cells_y,
+ double min_match_score = 0.75
+ )
+ {
+ const std::vector<rectangle>& object_boxes = determine_object_boxes(scanner, rects, min_match_score);
+ std::cout << "number of detection templates: "<< object_boxes.size() << std::endl;
+ for (unsigned long i = 0; i < object_boxes.size(); ++i)
+ {
+ std::cout << " object box " << i << ": width: " << object_boxes[i].width()
+ << " height: "<< object_boxes[i].height() << std::endl;
+ scanner.add_detection_template(object_boxes[i], create_grid_detection_template(object_boxes[i], cells_x, cells_y));
+ }
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMaGE_PYRAMID_TOOLS_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/scan_image_pyramid_tools_abstract.h b/ml/dlib/dlib/image_processing/scan_image_pyramid_tools_abstract.h
new file mode 100644
index 000000000..83a572df7
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/scan_image_pyramid_tools_abstract.h
@@ -0,0 +1,118 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SCAN_IMaGE_PYRAMID_TOOLS_ABSTRACT_Hh_
+#ifdef DLIB_SCAN_IMaGE_PYRAMID_TOOLS_ABSTRACT_Hh_
+
+#include "scan_image_pyramid_abstract.h"
+#include <vector>
+#include "../geometry.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ std::vector<rectangle> determine_object_boxes (
+ const image_scanner_type& scanner,
+ const std::vector<rectangle>& rects,
+ double min_match_score
+ );
+ /*!
+ requires
+ - 0 < min_match_score <= 1
+ - image_scanner_type == an implementation of the scan_image_pyramid
+ object defined in dlib/image_processing/scan_image_pyramid_abstract.h
+ ensures
+ - returns a set of object boxes which, when used as detection templates with
+ the given scanner, can attain at least min_match_score alignment with every
+ element of rects. Note that the alignment between two rectangles A and B is
+ defined as:
+ (A.intersect(B).area())/(double)(A+B).area()
+ - Only elements of rects which are not already well matched by the scanner are
+ considered. That is, if the scanner already has some detection templates in
+ it then the contents of rects will be checked against those detection
+ templates and elements with a match better than min_match_score are ignored.
+ !*/
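+
+ // A minimal usage sketch under assumed names (truth_boxes would come from
+ // your labeled data and scanner is any configured scan_image_pyramid):
+ //
+ // std::vector<rectangle> object_boxes =
+ // determine_object_boxes(scanner, truth_boxes, 0.75);
+ // // every truth box is now alignable with score >= 0.75 by some object
+ // // box at some pyramid level.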
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ std::vector<rectangle> determine_object_boxes (
+ const image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ double min_match_score
+ );
+ /*!
+ requires
+ - 0 < min_match_score <= 1
+ - image_scanner_type == an implementation of the scan_image_pyramid
+ object defined in dlib/image_processing/scan_image_pyramid_abstract.h
+ ensures
+ - copies all rectangles in rects into a std::vector<rectangle> object, call it
+ R. Then this function returns determine_object_boxes(scanner,R,min_match_score).
+ That is, it just calls the version of determine_object_boxes() defined above
+ and returns the results.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ void setup_grid_detection_templates (
+ image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ unsigned int cells_x,
+ unsigned int cells_y,
+ double min_match_score = 0.75
+ );
+ /*!
+ requires
+ - cells_x > 0
+ - cells_y > 0
+ - 0 < min_match_score <= 1
+ - image_scanner_type == an implementation of the scan_image_pyramid
+ object defined in dlib/image_processing/scan_image_pyramid_abstract.h
+ ensures
+ - uses determine_object_boxes(scanner,rects,min_match_score) to obtain a set of
+ object boxes and then adds them to the given scanner object as detection templates.
+ Also uses create_grid_detection_template(object_box, cells_x, cells_y) to create
+ each feature extraction region. Therefore, the detection templates will extract
+ features from a regular grid inside each object box.
+ !*/
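+
+ // Illustrative sketch: preparing a scanner for training (names are
+ // hypothetical; rects holds the ground truth boxes for each image):
+ //
+ // setup_grid_detection_templates(scanner, rects, 2, 2);
+ // // scanner now has one detection template per distinct box shape, each
+ // // extracting features from a 2x2 grid inside its object box.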
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner_type
+ >
+ void setup_grid_detection_templates_verbose (
+ image_scanner_type& scanner,
+ const std::vector<std::vector<rectangle> >& rects,
+ unsigned int cells_x,
+ unsigned int cells_y,
+ double min_match_score = 0.75
+ );
+ /*!
+ requires
+ - cells_x > 0
+ - cells_y > 0
+ - 0 < min_match_score <= 1
+ - image_scanner_type == an implementation of the scan_image_pyramid
+ object defined in dlib/image_processing/scan_image_pyramid_abstract.h
+ ensures
+ - this function is identical to setup_grid_detection_templates() except
+ that it also outputs the selected detection templates to standard out.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SCAN_IMaGE_PYRAMID_TOOLS_ABSTRACT_Hh_
+
diff --git a/ml/dlib/dlib/image_processing/setup_hashed_features.h b/ml/dlib/dlib/image_processing/setup_hashed_features.h
new file mode 100644
index 000000000..5b82cecb4
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/setup_hashed_features.h
@@ -0,0 +1,219 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SETUP_HAShED_FEATURES_Hh_
+#define DLIB_SETUP_HAShED_FEATURES_Hh_
+
+#include "setup_hashed_features_abstract.h"
+#include "scan_image_pyramid.h"
+#include "scan_image_boxes.h"
+#include "../lsh.h"
+#include "../statistics.h"
+#include "../image_keypoint.h"
+#include "../geometry.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class image_hash_construction_failure : public error
+ {
+ public:
+ image_hash_construction_failure(
+ const std::string& a
+ ): error(a) {}
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner
+ >
+ void use_uniform_feature_weights (
+ image_scanner& scanner
+ )
+ {
+ typename image_scanner::feature_extractor_type fe;
+ fe.copy_configuration(scanner.get_feature_extractor());
+ fe.use_uniform_feature_weights();
+ scanner.copy_configuration(fe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner
+ >
+ void use_relative_feature_weights (
+ image_scanner& scanner
+ )
+ {
+ typename image_scanner::feature_extractor_type fe;
+ fe.copy_configuration(scanner.get_feature_extractor());
+ fe.use_relative_feature_weights();
+ scanner.copy_configuration(fe);
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// stuff for scan_image_pyramid
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename pyramid,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image
+ >
+ void setup_hashed_features (
+ scan_image_pyramid<pyramid, feature_image<feature_extractor, projection_hash> >& scanner,
+ const image_array& images,
+ const feature_extractor& fe,
+ int bits,
+ unsigned long num_samples = 200000
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < bits && bits <= 32 &&
+ num_samples > 1 &&
+ images.size() > 0,
+ "\t void setup_hashed_features()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t bits: " << bits
+ << "\n\t num_samples: " << num_samples
+ << "\n\t images.size(): " << images.size()
+ );
+
+ pyramid pyr;
+
+ const random_subset_selector<typename feature_extractor::descriptor_type>& samps =
+ randomly_sample_image_features(images, pyr, fe, num_samples);
+
+ if (samps.size() <= 1)
+ throw dlib::image_hash_construction_failure("Images too small, not able to gather enough samples to make hash");
+
+ projection_hash phash = create_random_projection_hash(samps, bits);
+
+ feature_image<feature_extractor, projection_hash> hfe;
+ hfe.copy_configuration(scanner.get_feature_extractor());
+ hfe.set_hash(phash);
+ hfe.copy_configuration(fe);
+ scanner.copy_configuration(hfe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename pyramid,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image
+ >
+ void setup_hashed_features (
+ scan_image_pyramid<pyramid, feature_image<feature_extractor, projection_hash> >& scanner,
+ const image_array& images,
+ int bits,
+ unsigned long num_samples = 200000
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < bits && bits <= 32 &&
+ num_samples > 1 &&
+ images.size() > 0,
+ "\t void setup_hashed_features()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t bits: " << bits
+ << "\n\t num_samples: " << num_samples
+ << "\n\t images.size(): " << images.size()
+ );
+
+ feature_extractor fe;
+ setup_hashed_features(scanner, images, fe, bits, num_samples);
+ }
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// stuff for scan_image_boxes
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image,
+ typename box_generator
+ >
+ void setup_hashed_features (
+ scan_image_boxes<feature_image<feature_extractor, projection_hash>,box_generator >& scanner,
+ const image_array& images,
+ const feature_extractor& fe,
+ int bits,
+ unsigned long num_samples = 200000
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < bits && bits <= 32 &&
+ num_samples > 1 &&
+ images.size() > 0,
+ "\t void setup_hashed_features()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t bits: " << bits
+ << "\n\t num_samples: " << num_samples
+ << "\n\t images.size(): " << images.size()
+ );
+
+ pyramid_disable pyr;
+
+ const random_subset_selector<typename feature_extractor::descriptor_type>& samps =
+ randomly_sample_image_features(images, pyr, fe, num_samples);
+
+ if (samps.size() <= 1)
+ throw dlib::image_hash_construction_failure("Images too small, not able to gather enough samples to make hash");
+
+ projection_hash phash = create_random_projection_hash(samps, bits);
+
+ feature_image<feature_extractor, projection_hash> hfe;
+ hfe.copy_configuration(scanner.get_feature_extractor());
+ hfe.set_hash(phash);
+ hfe.copy_configuration(fe);
+ scanner.copy_configuration(hfe);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image,
+ typename box_generator
+ >
+ void setup_hashed_features (
+ scan_image_boxes<feature_image<feature_extractor, projection_hash>,box_generator>& scanner,
+ const image_array& images,
+ int bits,
+ unsigned long num_samples = 200000
+ )
+ {
+ // make sure requires clause is not broken
+ DLIB_ASSERT(0 < bits && bits <= 32 &&
+ num_samples > 1 &&
+ images.size() > 0,
+ "\t void setup_hashed_features()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t bits: " << bits
+ << "\n\t num_samples: " << num_samples
+ << "\n\t images.size(): " << images.size()
+ );
+
+ feature_extractor fe;
+ setup_hashed_features(scanner, images, fe, bits, num_samples);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SETUP_HAShED_FEATURES_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/setup_hashed_features_abstract.h b/ml/dlib/dlib/image_processing/setup_hashed_features_abstract.h
new file mode 100644
index 000000000..886411cd4
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/setup_hashed_features_abstract.h
@@ -0,0 +1,210 @@
+// Copyright (C) 2011 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SETUP_HAShED_FEATURES_ABSTRACT_Hh_
+#ifdef DLIB_SETUP_HAShED_FEATURES_ABSTRACT_Hh_
+
+#include "scan_image_pyramid_abstract.h"
+#include "scan_image_boxes_abstract.h"
+#include "../lsh/projection_hash_abstract.h"
+#include "../image_keypoint/hashed_feature_image_abstract.h"
+#include "../image_keypoint/binned_vector_feature_image_abstract.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class image_hash_construction_failure : public error
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This is the exception object used by the routines in this file.
+ !*/
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner
+ >
+ void use_uniform_feature_weights (
+ image_scanner& scanner
+ );
+ /*!
+ requires
+ - image_scanner should be either scan_image_pyramid or scan_image_boxes and
+ should use the hashed_feature_image as its local feature extractor.
+ ensures
+ - #scanner.get_feature_extractor().uses_uniform_feature_weights() == true
+ (i.e. Make the scanner's feature extractor use the uniform feature weighting
+ scheme)
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_scanner
+ >
+ void use_relative_feature_weights (
+ image_scanner& scanner
+ );
+ /*!
+ requires
+ - image_scanner should be either scan_image_pyramid or scan_image_boxes and
+ should use the hashed_feature_image as its local feature extractor.
+ ensures
+ - #scanner.get_feature_extractor().uses_uniform_feature_weights() == false
+ (i.e. Make the scanner's feature extractor use the relative feature weighting
+ scheme)
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename pyramid,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image
+ >
+ void setup_hashed_features (
+ scan_image_pyramid<pyramid, feature_image<feature_extractor, projection_hash> >& scanner,
+ const image_array& images,
+ const feature_extractor& fe,
+ int bits,
+ unsigned long num_samples = 200000
+ );
+ /*!
+ requires
+ - 0 < bits <= 32
+ - num_samples > 1
+ - images.size() > 0
+ - it must be valid to pass images[0] into scanner.load().
+ (also, image_array must be an implementation of dlib/array/array_kernel_abstract.h)
+ - feature_image == must be either hashed_feature_image, binned_vector_feature_image,
+ or a type with a compatible interface.
+ ensures
+ - Creates a projection_hash suitable for hashing the feature vectors produced by
+ fe and then configures scanner to use this hash function.
+ - The hash function will map vectors into integers in the range [0, pow(2,bits))
+ - The hash function will be set up so that it hashes a random sample of num_samples
+ vectors from fe such that each bin ends up with roughly the same number of
+ elements in it.
+ throws
+ - image_hash_construction_failure
+ This exception is thrown if there is a problem creating the projection_hash.
+ This should only happen if the images are so small that they contain
+ fewer than 2 feature vectors.
+ !*/
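+
+ // A hedged usage sketch (scanner and images are hypothetical, previously
+ // constructed objects):
+ //
+ // try
+ // {
+ // setup_hashed_features(scanner, images, 12); // 2^12 == 4096 hash bins
+ // }
+ // catch (image_hash_construction_failure& e)
+ // {
+ // // the images were too small to sample enough feature vectors
+ // }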
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename pyramid,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image
+ >
+ void setup_hashed_features (
+ scan_image_pyramid<pyramid, feature_image<feature_extractor, projection_hash> >& scanner,
+ const image_array& images,
+ int bits,
+ unsigned long num_samples = 200000
+ );
+ /*!
+ requires
+ - 0 < bits <= 32
+ - num_samples > 1
+ - images.size() > 0
+ - it must be valid to pass images[0] into scanner.load().
+ (also, image_array must be an implementation of dlib/array/array_kernel_abstract.h)
+ - feature_image == must be either hashed_feature_image, binned_vector_feature_image,
+ or a type with a compatible interface.
+ ensures
+ - performs: setup_hashed_features(scanner, images, feature_extractor(), bits, num_samples)
+ throws
+ - image_hash_construction_failure
+ This exception is thrown if there is a problem creating the projection_hash.
+ This should only happen if the images are so small that they contain
+ fewer than 2 feature vectors.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image,
+ typename box_generator
+ >
+ void setup_hashed_features (
+ scan_image_boxes<feature_image<feature_extractor, projection_hash>,box_generator>& scanner,
+ const image_array& images,
+ const feature_extractor& fe,
+ int bits,
+ unsigned long num_samples = 200000
+ );
+ /*!
+ requires
+ - 0 < bits <= 32
+ - num_samples > 1
+ - images.size() > 0
+ - it must be valid to pass images[0] into scanner.load().
+ (also, image_array must be an implementation of dlib/array/array_kernel_abstract.h)
+ - feature_image == must be either hashed_feature_image, binned_vector_feature_image,
+ or a type with a compatible interface.
+ ensures
+ - Creates a projection_hash suitable for hashing the feature vectors produced by
+ fe and then configures scanner to use this hash function.
+ - The hash function will map vectors into integers in the range [0, pow(2,bits))
+ - The hash function will be set up so that it hashes a random sample of num_samples
+ vectors from fe such that each bin ends up with roughly the same number of
+ elements in it.
+ throws
+ - image_hash_construction_failure
+ This exception is thrown if there is a problem creating the projection_hash.
+ This should only happen if the images are so small that they contain
+ fewer than 2 feature vectors.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array,
+ typename feature_extractor,
+ template <typename fe, typename hash> class feature_image,
+ typename box_generator
+ >
+ void setup_hashed_features (
+ scan_image_boxes<feature_image<feature_extractor, projection_hash>,box_generator>& scanner,
+ const image_array& images,
+ int bits,
+ unsigned long num_samples = 200000
+ );
+ /*!
+ requires
+ - 0 < bits <= 32
+ - num_samples > 1
+ - images.size() > 0
+ - it must be valid to pass images[0] into scanner.load().
+ (also, image_array must be an implementation of dlib/array/array_kernel_abstract.h)
+ - feature_image == must be either hashed_feature_image, binned_vector_feature_image,
+ or a type with a compatible interface.
+ ensures
+ - performs: setup_hashed_features(scanner, images, feature_extractor(), bits, num_samples)
+ throws
+ - image_hash_construction_failure
+ This exception is thrown if there is a problem creating the projection_hash.
+ This should only happen if the images are so small that they contain
+ fewer than 2 feature vectors.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SETUP_HAShED_FEATURES_ABSTRACT_Hh_
+
+
diff --git a/ml/dlib/dlib/image_processing/shape_predictor.h b/ml/dlib/dlib/image_processing/shape_predictor.h
new file mode 100644
index 000000000..05e9a60fd
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/shape_predictor.h
@@ -0,0 +1,524 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SHAPE_PREDICToR_H_
+#define DLIB_SHAPE_PREDICToR_H_
+
+#include "shape_predictor_abstract.h"
+#include "full_object_detection.h"
+#include "../algs.h"
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../pixel.h"
+#include "../statistics.h"
+#include <utility>
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ namespace impl
+ {
+ struct split_feature
+ {
+ unsigned long idx1;
+ unsigned long idx2;
+ float thresh;
+
+ friend inline void serialize (const split_feature& item, std::ostream& out)
+ {
+ dlib::serialize(item.idx1, out);
+ dlib::serialize(item.idx2, out);
+ dlib::serialize(item.thresh, out);
+ }
+ friend inline void deserialize (split_feature& item, std::istream& in)
+ {
+ dlib::deserialize(item.idx1, in);
+ dlib::deserialize(item.idx2, in);
+ dlib::deserialize(item.thresh, in);
+ }
+ };
+
+
+ // a tree is just a std::vector<impl::split_feature>. We use this function to navigate the
+ // tree nodes
+ inline unsigned long left_child (unsigned long idx) { return 2*idx + 1; }
+ /*!
+ ensures
+ - returns the index of the left child of the binary tree node idx
+ !*/
+ inline unsigned long right_child (unsigned long idx) { return 2*idx + 2; }
+ /*!
+ ensures
+ - returns the index of the right child of the binary tree node idx
+ !*/
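+ // For example, this implicit layout stores a complete binary tree in a
+ // flat vector: the root is node 0, its children are nodes 1 and 2, node
+ // 1's children are nodes 3 and 4, and so on.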
+
+ struct regression_tree
+ {
+ std::vector<split_feature> splits;
+ std::vector<matrix<float,0,1> > leaf_values;
+
+ unsigned long num_leaves() const { return leaf_values.size(); }
+
+ inline const matrix<float,0,1>& operator()(
+ const std::vector<float>& feature_pixel_values,
+ unsigned long& i
+ ) const
+ /*!
+ requires
+ - All the index values in splits are less than feature_pixel_values.size()
+ - leaf_values.size() is a power of 2.
+ (i.e. we require a tree with all the levels fully filled out.)
+ - leaf_values.size() == splits.size()+1
+ (i.e. there needs to be the right number of leaves given the number of splits in the tree)
+ ensures
+ - runs through the tree and returns the vector at the leaf we end up in.
+ - #i == the selected leaf node index.
+ !*/
+ {
+ i = 0;
+ while (i < splits.size())
+ {
+ if ((float)feature_pixel_values[splits[i].idx1] - (float)feature_pixel_values[splits[i].idx2] > splits[i].thresh)
+ i = left_child(i);
+ else
+ i = right_child(i);
+ }
+ i = i - splits.size();
+ return leaf_values[i];
+ }
+
+ friend void serialize (const regression_tree& item, std::ostream& out)
+ {
+ dlib::serialize(item.splits, out);
+ dlib::serialize(item.leaf_values, out);
+ }
+ friend void deserialize (regression_tree& item, std::istream& in)
+ {
+ dlib::deserialize(item.splits, in);
+ dlib::deserialize(item.leaf_values, in);
+ }
+ };
+
+ // ------------------------------------------------------------------------------------
+
+ inline vector<float,2> location (
+ const matrix<float,0,1>& shape,
+ unsigned long idx
+ )
+ /*!
+ requires
+ - idx < shape.size()/2
+ - shape.size()%2 == 0
+ ensures
+ - returns the idx-th point from the shape vector.
+ !*/
+ {
+ return vector<float,2>(shape(idx*2), shape(idx*2+1));
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ inline unsigned long nearest_shape_point (
+ const matrix<float,0,1>& shape,
+ const dlib::vector<float,2>& pt
+ )
+ {
+ // find the nearest part of the shape to this pixel
+ float best_dist = std::numeric_limits<float>::infinity();
+ const unsigned long num_shape_parts = shape.size()/2;
+ unsigned long best_idx = 0;
+ for (unsigned long j = 0; j < num_shape_parts; ++j)
+ {
+ const float dist = length_squared(location(shape,j)-pt);
+ if (dist < best_dist)
+ {
+ best_dist = dist;
+ best_idx = j;
+ }
+ }
+ return best_idx;
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ inline void create_shape_relative_encoding (
+ const matrix<float,0,1>& shape,
+ const std::vector<dlib::vector<float,2> >& pixel_coordinates,
+ std::vector<unsigned long>& anchor_idx,
+ std::vector<dlib::vector<float,2> >& deltas
+ )
+ /*!
+ requires
+ - shape.size()%2 == 0
+ - shape.size() > 0
+ ensures
+ - #anchor_idx.size() == pixel_coordinates.size()
+ - #deltas.size() == pixel_coordinates.size()
+ - for all valid i:
+ - pixel_coordinates[i] == location(shape,#anchor_idx[i]) + #deltas[i]
+ !*/
+ {
+ anchor_idx.resize(pixel_coordinates.size());
+ deltas.resize(pixel_coordinates.size());
+
+
+ for (unsigned long i = 0; i < pixel_coordinates.size(); ++i)
+ {
+ anchor_idx[i] = nearest_shape_point(shape, pixel_coordinates[i]);
+ deltas[i] = pixel_coordinates[i] - location(shape,anchor_idx[i]);
+ }
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ inline point_transform_affine find_tform_between_shapes (
+ const matrix<float,0,1>& from_shape,
+ const matrix<float,0,1>& to_shape
+ )
+ {
+ DLIB_ASSERT(from_shape.size() == to_shape.size() && (from_shape.size()%2) == 0 && from_shape.size() > 0,"");
+ std::vector<vector<float,2> > from_points, to_points;
+ const unsigned long num = from_shape.size()/2;
+ from_points.reserve(num);
+ to_points.reserve(num);
+ if (num == 1)
+ {
+ // Just use an identity transform if there is only one landmark.
+ return point_transform_affine();
+ }
+
+ for (unsigned long i = 0; i < num; ++i)
+ {
+ from_points.push_back(location(from_shape,i));
+ to_points.push_back(location(to_shape,i));
+ }
+ return find_similarity_transform(from_points, to_points);
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ inline point_transform_affine normalizing_tform (
+ const rectangle& rect
+ )
+ /*!
+ ensures
+ - returns a transform that maps rect.tl_corner() to (0,0) and rect.br_corner()
+ to (1,1).
+ !*/
+ {
+ std::vector<vector<float,2> > from_points, to_points;
+ from_points.push_back(rect.tl_corner()); to_points.push_back(point(0,0));
+ from_points.push_back(rect.tr_corner()); to_points.push_back(point(1,0));
+ from_points.push_back(rect.br_corner()); to_points.push_back(point(1,1));
+ return find_affine_transform(from_points, to_points);
+ }
+
+ // ------------------------------------------------------------------------------------
+
+ inline point_transform_affine unnormalizing_tform (
+ const rectangle& rect
+ )
+ /*!
+ ensures
+ - returns a transform that maps (0,0) to rect.tl_corner() and (1,1) to
+ rect.br_corner().
+ !*/
+ {
+ std::vector<vector<float,2> > from_points, to_points;
+ to_points.push_back(rect.tl_corner()); from_points.push_back(point(0,0));
+ to_points.push_back(rect.tr_corner()); from_points.push_back(point(1,0));
+ to_points.push_back(rect.br_corner()); from_points.push_back(point(1,1));
+ return find_affine_transform(from_points, to_points);
+ }
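+
+ // Illustrative note: normalizing_tform(rect) and unnormalizing_tform(rect)
+ // are inverses of each other over rect's corners. E.g. for a hypothetical
+ // rectangle r:
+ // const point_transform_affine n = normalizing_tform(r);
+ // const point_transform_affine un = unnormalizing_tform(r);
+ // // n maps r.tl_corner() to (0,0); un maps (1,1) back to r.br_corner().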
+
+ // ------------------------------------------------------------------------------------
+
+ template <typename image_type, typename feature_type>
+ void extract_feature_pixel_values (
+ const image_type& img_,
+ const rectangle& rect,
+ const matrix<float,0,1>& current_shape,
+ const matrix<float,0,1>& reference_shape,
+ const std::vector<unsigned long>& reference_pixel_anchor_idx,
+ const std::vector<dlib::vector<float,2> >& reference_pixel_deltas,
+ std::vector<feature_type>& feature_pixel_values
+ )
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - reference_pixel_anchor_idx.size() == reference_pixel_deltas.size()
+ - current_shape.size() == reference_shape.size()
+ - reference_shape.size()%2 == 0
+ - max(mat(reference_pixel_anchor_idx)) < reference_shape.size()/2
+ ensures
+ - #feature_pixel_values.size() == reference_pixel_deltas.size()
+ - for all valid i:
+ - #feature_pixel_values[i] == the value of the pixel in img_ that
+ corresponds to the pixel identified by reference_pixel_anchor_idx[i]
+ and reference_pixel_deltas[i] when the pixel is located relative to
+ current_shape rather than reference_shape.
+ !*/
+ {
+ const matrix<float,2,2> tform = matrix_cast<float>(find_tform_between_shapes(reference_shape, current_shape).get_m());
+ const point_transform_affine tform_to_img = unnormalizing_tform(rect);
+
+ const rectangle area = get_rect(img_);
+
+ const_image_view<image_type> img(img_);
+ feature_pixel_values.resize(reference_pixel_deltas.size());
+ for (unsigned long i = 0; i < feature_pixel_values.size(); ++i)
+ {
+ // Compute the point in the current shape corresponding to the i-th pixel and
+ // then map it from the normalized shape space into pixel space.
+ point p = tform_to_img(tform*reference_pixel_deltas[i] + location(current_shape, reference_pixel_anchor_idx[i]));
+ if (area.contains(p))
+ feature_pixel_values[i] = get_pixel_intensity(img[p.y()][p.x()]);
+ else
+ feature_pixel_values[i] = 0;
+ }
+ }
+
+ } // end namespace impl
+
+// ----------------------------------------------------------------------------------------
+
+ class shape_predictor
+ {
+ public:
+
+
+ shape_predictor (
+ )
+ {}
+
+ shape_predictor (
+ const matrix<float,0,1>& initial_shape_,
+ const std::vector<std::vector<impl::regression_tree> >& forests_,
+ const std::vector<std::vector<dlib::vector<float,2> > >& pixel_coordinates
+ ) : initial_shape(initial_shape_), forests(forests_)
+ /*!
+ requires
+ - initial_shape.size()%2 == 0
+ - forests.size() == pixel_coordinates.size() == the number of cascades
+ - for all valid i:
+ - all the index values in forests[i] are less than pixel_coordinates[i].size()
+ - for all valid i and j:
+ - forests[i][j].leaf_values.size() is a power of 2.
+ (i.e. we require a tree with all the levels fully filled out.)
+ - forests[i][j].leaf_values.size() == forests[i][j].splits.size()+1
+ (i.e. there need to be the right number of leaves given the number of splits in the tree)
+ !*/
+ {
+ anchor_idx.resize(pixel_coordinates.size());
+ deltas.resize(pixel_coordinates.size());
+ // Each cascade uses a different set of pixels for its features. We compute
+ // their representations relative to the initial shape now and save it.
+ for (unsigned long i = 0; i < pixel_coordinates.size(); ++i)
+ impl::create_shape_relative_encoding(initial_shape, pixel_coordinates[i], anchor_idx[i], deltas[i]);
+ }
+
+ unsigned long num_parts (
+ ) const
+ {
+ return initial_shape.size()/2;
+ }
+
+ unsigned long num_features (
+ ) const
+ {
+ unsigned long num = 0;
+ for (unsigned long iter = 0; iter < forests.size(); ++iter)
+ for (unsigned long i = 0; i < forests[iter].size(); ++i)
+ num += forests[iter][i].num_leaves();
+ return num;
+ }
+
+ template <typename image_type>
+ full_object_detection operator()(
+ const image_type& img,
+ const rectangle& rect
+ ) const
+ {
+ using namespace impl;
+ matrix<float,0,1> current_shape = initial_shape;
+ std::vector<float> feature_pixel_values;
+ for (unsigned long iter = 0; iter < forests.size(); ++iter)
+ {
+ extract_feature_pixel_values(img, rect, current_shape, initial_shape,
+ anchor_idx[iter], deltas[iter], feature_pixel_values);
+ unsigned long leaf_idx;
+ // evaluate all the trees at this level of the cascade.
+ for (unsigned long i = 0; i < forests[iter].size(); ++i)
+ current_shape += forests[iter][i](feature_pixel_values, leaf_idx);
+ }
+
+ // convert the current_shape into a full_object_detection
+ const point_transform_affine tform_to_img = unnormalizing_tform(rect);
+ std::vector<point> parts(current_shape.size()/2);
+ for (unsigned long i = 0; i < parts.size(); ++i)
+ parts[i] = tform_to_img(location(current_shape, i));
+ return full_object_detection(rect, parts);
+ }
+
+ template <typename image_type, typename T, typename U>
+ full_object_detection operator()(
+ const image_type& img,
+ const rectangle& rect,
+ std::vector<std::pair<T,U> >& feats
+ ) const
+ {
+ feats.clear();
+ using namespace impl;
+ matrix<float,0,1> current_shape = initial_shape;
+ std::vector<float> feature_pixel_values;
+ unsigned long feat_offset = 0;
+ for (unsigned long iter = 0; iter < forests.size(); ++iter)
+ {
+ extract_feature_pixel_values(img, rect, current_shape, initial_shape,
+ anchor_idx[iter], deltas[iter], feature_pixel_values);
+ // evaluate all the trees at this level of the cascade.
+ for (unsigned long i = 0; i < forests[iter].size(); ++i)
+ {
+ unsigned long leaf_idx;
+ current_shape += forests[iter][i](feature_pixel_values, leaf_idx);
+
+ feats.push_back(std::make_pair(feat_offset+leaf_idx, 1));
+ feat_offset += forests[iter][i].num_leaves();
+ }
+ }
+
+ // convert the current_shape into a full_object_detection
+ const point_transform_affine tform_to_img = unnormalizing_tform(rect);
+ std::vector<point> parts(current_shape.size()/2);
+ for (unsigned long i = 0; i < parts.size(); ++i)
+ parts[i] = tform_to_img(location(current_shape, i));
+ return full_object_detection(rect, parts);
+ }
+
+ friend void serialize (const shape_predictor& item, std::ostream& out);
+
+ friend void deserialize (shape_predictor& item, std::istream& in);
+
+ private:
+ matrix<float,0,1> initial_shape;
+ std::vector<std::vector<impl::regression_tree> > forests;
+ std::vector<std::vector<unsigned long> > anchor_idx;
+ std::vector<std::vector<dlib::vector<float,2> > > deltas;
+ };
+
+ inline void serialize (const shape_predictor& item, std::ostream& out)
+ {
+ int version = 1;
+ dlib::serialize(version, out);
+ dlib::serialize(item.initial_shape, out);
+ dlib::serialize(item.forests, out);
+ dlib::serialize(item.anchor_idx, out);
+ dlib::serialize(item.deltas, out);
+ }
+
+ inline void deserialize (shape_predictor& item, std::istream& in)
+ {
+ int version = 0;
+ dlib::deserialize(version, in);
+ if (version != 1)
+ throw serialization_error("Unexpected version found while deserializing dlib::shape_predictor.");
+ dlib::deserialize(item.initial_shape, in);
+ dlib::deserialize(item.forests, in);
+ dlib::deserialize(item.anchor_idx, in);
+ dlib::deserialize(item.deltas, in);
+ }
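+
+ // A minimal usage sketch for the two routines above (the file name is a
+ // hypothetical example):
+ //
+ // shape_predictor sp;
+ // std::ifstream fin("my_shape_model.dat", std::ios::binary);
+ // deserialize(sp, fin);
+ // full_object_detection shape = sp(img, rect);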
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array
+ >
+ double test_shape_predictor (
+ const shape_predictor& sp,
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects,
+ const std::vector<std::vector<double> >& scales
+ )
+ {
+ // make sure requires clause is not broken
+#ifdef ENABLE_ASSERTS
+ DLIB_CASSERT( images.size() == objects.size() ,
+ "\t double test_shape_predictor()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t images.size(): " << images.size()
+ << "\n\t objects.size(): " << objects.size()
+ );
+ for (unsigned long i = 0; i < objects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < objects[i].size(); ++j)
+ {
+ DLIB_CASSERT(objects[i][j].num_parts() == sp.num_parts(),
+ "\t double test_shape_predictor()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t objects["<<i<<"]["<<j<<"].num_parts(): " << objects[i][j].num_parts()
+ << "\n\t sp.num_parts(): " << sp.num_parts()
+ );
+ }
+ if (scales.size() != 0)
+ {
+ DLIB_CASSERT(objects[i].size() == scales[i].size(),
+ "\t double test_shape_predictor()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t objects["<<i<<"].size(): " << objects[i].size()
+ << "\n\t scales["<<i<<"].size(): " << scales[i].size()
+ );
+
+ }
+ }
+#endif
+
+ running_stats<double> rs;
+ for (unsigned long i = 0; i < objects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < objects[i].size(); ++j)
+ {
+ // Just use a scale of 1 (i.e. no scale at all) if the caller didn't supply
+ // any scales.
+ const double scale = scales.size()==0 ? 1 : scales[i][j];
+
+ full_object_detection det = sp(images[i], objects[i][j].get_rect());
+
+ for (unsigned long k = 0; k < det.num_parts(); ++k)
+ {
+ if (objects[i][j].part(k) != OBJECT_PART_NOT_PRESENT)
+ {
+ double score = length(det.part(k) - objects[i][j].part(k))/scale;
+ rs.add(score);
+ }
+ }
+ }
+ }
+ return rs.mean();
+ }
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array
+ >
+ double test_shape_predictor (
+ const shape_predictor& sp,
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects
+ )
+ {
+ std::vector<std::vector<double> > no_scales;
+ return test_shape_predictor(sp, images, objects, no_scales);
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SHAPE_PREDICToR_H_
+
diff --git a/ml/dlib/dlib/image_processing/shape_predictor_abstract.h b/ml/dlib/dlib/image_processing/shape_predictor_abstract.h
new file mode 100644
index 000000000..718b4952e
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/shape_predictor_abstract.h
@@ -0,0 +1,195 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SHAPE_PREDICToR_ABSTRACT_H_
+#ifdef DLIB_SHAPE_PREDICToR_ABSTRACT_H_
+
+#include "full_object_detection_abstract.h"
+#include "../matrix.h"
+#include "../geometry.h"
+#include "../pixel.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class shape_predictor
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool that takes in an image region containing some object
+ and outputs a set of point locations that define the pose of the object.
+ The classic example of this is human face pose prediction, where you take
+ an image of a human face as input and are expected to identify the
+ locations of important facial landmarks such as the corners of the mouth
+ and eyes, tip of the nose, and so forth.
+
+ To create useful instantiations of this object you need to use the
+ shape_predictor_trainer object defined in the
+ shape_predictor_trainer_abstract.h file to train a shape_predictor using a
+ set of training images, each annotated with shapes you want to predict.
+
+ THREAD SAFETY
+ No synchronization is required when using this object. In particular, a
+ single instance of this object can be used from multiple threads at the
+ same time.
+ !*/
+
+ public:
+
+ shape_predictor (
+ );
+ /*!
+ ensures
+ - #num_parts() == 0
+ - #num_features() == 0
+ !*/
+
+ unsigned long num_parts (
+ ) const;
+ /*!
+ ensures
+ - returns the number of parts in the shapes predicted by this object.
+ !*/
+
+ unsigned long num_features (
+ ) const;
+ /*!
+ ensures
+ - Returns the dimensionality of the feature vector output by operator().
+ This number is the total number of trees in this object times the number
+ of leaves on each tree.
+ !*/
+
+ template <typename image_type, typename T, typename U>
+ full_object_detection operator()(
+ const image_type& img,
+ const rectangle& rect,
+ std::vector<std::pair<T,U> >& feats
+ ) const;
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ - T is some unsigned integral type (e.g. unsigned int).
+ - U is any scalar type capable of storing the value 1 (e.g. float).
+ ensures
+ - Runs the shape prediction algorithm on the part of the image contained in
+ the given bounding rectangle. So it will try and fit the shape model to
+ the contents of the given rectangle in the image. For example, if there
+ is a human face inside the rectangle and you use a face landmarking shape
+ model then this function will return the locations of the face landmarks
+ as the parts. So the return value is a full_object_detection DET such
+ that:
+ - DET.get_rect() == rect
+ - DET.num_parts() == num_parts()
+ - for all valid i:
+ - DET.part(i) == the location in img for the i-th part of the shape
+ predicted by this object.
+ - #feats == a sparse vector that records which leaf each tree used to make
+ the shape prediction. Moreover, it is an indicator vector. Therefore,
+ for all valid i:
+ - #feats[i].second == 1
+ Further, #feats is a vector from the space of num_features() dimensional
+ vectors. The output shape positions can be represented as the dot
+ product between #feats and a weight vector. Therefore, #feats encodes
+ all the information from img that was used to predict the returned shape
+ object.
+ !*/
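+
+ // Illustrative sketch of this overload (names are hypothetical):
+ //
+ // std::vector<std::pair<unsigned long,float> > feats;
+ // full_object_detection det = sp(img, rect, feats);
+ // // feats now holds one (feature index, 1) pair per tree, where the
+ // // index identifies the leaf that tree selected.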
+
+ template <typename image_type>
+ full_object_detection operator()(
+ const image_type& img,
+ const rectangle& rect
+ ) const;
+ /*!
+ requires
+ - image_type == an image object that implements the interface defined in
+ dlib/image_processing/generic_image.h
+ ensures
+ - Calling this function is equivalent to calling (*this)(img, rect, ignored)
+ where the 3rd argument is discarded.
+ !*/
+
+ };
+
+ void serialize (const shape_predictor& item, std::ostream& out);
+ void deserialize (shape_predictor& item, std::istream& in);
+ /*!
+ provides serialization support
+ !*/
+
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename image_array
+ >
+ double test_shape_predictor (
+ const shape_predictor& sp,
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects,
+ const std::vector<std::vector<double> >& scales
+ );
+ /*!
+ requires
+ - image_array is a dlib::array of image objects where each image object
+ implements the interface defined in dlib/image_processing/generic_image.h
+ - images.size() == objects.size()
+ - for all valid i and j:
+ - objects[i][j].num_parts() == sp.num_parts()
+ - if (scales.size() != 0) then
+ - There must be a scale value for each full_object_detection in objects.
+ That is, it must be the case that:
+ - scales.size() == objects.size()
+ - for all valid i:
+ - scales[i].size() == objects[i].size()
+ ensures
+ - Tests the given shape_predictor by running it on each of the given objects and
+ checking how well it recovers the part positions. In particular, for all
+ valid i and j we perform:
+ sp(images[i], objects[i][j].get_rect())
+ and compare the result with the truth part positions in objects[i][j]. We
+ then return the average distance (measured in pixels) between a predicted
+ part location and its true position.
+ - Note that any parts in objects that are set to OBJECT_PART_NOT_PRESENT are
+ simply ignored.
+ - if (scales.size() != 0) then
+ - Each time we compute the distance between a predicted part location and
+ its true location in objects[i][j] we divide the distance by
+ scales[i][j]. Therefore, if you want the reported error to be the
+ average pixel distance then give an empty scales vector, but if you want
+ the returned value to be something else like the average distance
+ normalized by some feature of each object (e.g. the interocular distance)
+ then you can supply those normalizing values via scales.
+ !*/
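+
+ // A hedged usage sketch: reporting mean error normalized by interocular
+ // distance. get_interocular_distances() is a hypothetical helper you
+ // would write over your own annotations:
+ //
+ // std::vector<std::vector<double> > scales = get_interocular_distances(objects);
+ // double err = test_shape_predictor(sp, images, objects, scales);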
+
+ template <
+ typename image_array
+ >
+ double test_shape_predictor (
+ const shape_predictor& sp,
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects
+ );
+ /*!
+ requires
+ - image_array is a dlib::array of image objects where each image object
+ implements the interface defined in dlib/image_processing/generic_image.h
+ - images.size() == objects.size()
+ - for all valid i and j:
+ - objects[i][j].num_parts() == sp.num_parts()
+ ensures
+ - returns test_shape_predictor(sp, images, objects, no_scales) where no_scales
+ is an empty vector. So this is just a convenience function for calling the
+ above test_shape_predictor() routine without a scales argument.
+ !*/
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SHAPE_PREDICToR_ABSTRACT_H_
+
diff --git a/ml/dlib/dlib/image_processing/shape_predictor_trainer.h b/ml/dlib/dlib/image_processing/shape_predictor_trainer.h
new file mode 100644
index 000000000..3090998f9
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/shape_predictor_trainer.h
@@ -0,0 +1,852 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#ifndef DLIB_SHAPE_PREDICToR_TRAINER_H_
+#define DLIB_SHAPE_PREDICToR_TRAINER_H_
+
+#include "shape_predictor_trainer_abstract.h"
+#include "shape_predictor.h"
+#include "../console_progress_indicator.h"
+#include "../threads.h"
+#include "../data_io/image_dataset_metadata.h"
+#include "box_overlap_testing.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class shape_predictor_trainer
+ {
+ /*!
+ This thing really only works with unsigned char or rgb_pixel images (since we assume the threshold
+ should be in the range [-128,128]).
+ !*/
+ public:
+
+ enum padding_mode_t
+ {
+ bounding_box_relative,
+ landmark_relative
+ };
+
+ shape_predictor_trainer (
+ )
+ {
+ _cascade_depth = 10;
+ _tree_depth = 4;
+ _num_trees_per_cascade_level = 500;
+ _nu = 0.1;
+ _oversampling_amount = 20;
+ _feature_pool_size = 400;
+ _lambda = 0.1;
+ _num_test_splits = 20;
+ _feature_pool_region_padding = 0;
+ _verbose = false;
+ _num_threads = 0;
+ _padding_mode = landmark_relative;
+ }
+
+ unsigned long get_cascade_depth (
+ ) const { return _cascade_depth; }
+
+ void set_cascade_depth (
+ unsigned long depth
+ )
+ {
+ DLIB_CASSERT(depth > 0,
+ "\t void shape_predictor_trainer::set_cascade_depth()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t depth: " << depth
+ );
+
+ _cascade_depth = depth;
+ }
+
+ unsigned long get_tree_depth (
+ ) const { return _tree_depth; }
+
+ void set_tree_depth (
+ unsigned long depth
+ )
+ {
+ DLIB_CASSERT(depth > 0,
+ "\t void shape_predictor_trainer::set_tree_depth()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t depth: " << depth
+ );
+
+ _tree_depth = depth;
+ }
+
+ unsigned long get_num_trees_per_cascade_level (
+ ) const { return _num_trees_per_cascade_level; }
+
+ void set_num_trees_per_cascade_level (
+ unsigned long num
+ )
+ {
+ DLIB_CASSERT( num > 0,
+ "\t void shape_predictor_trainer::set_num_trees_per_cascade_level()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t num: " << num
+ );
+ _num_trees_per_cascade_level = num;
+ }
+
+ double get_nu (
+ ) const { return _nu; }
+ void set_nu (
+ double nu
+ )
+ {
+ DLIB_CASSERT(0 < nu && nu <= 1,
+ "\t void shape_predictor_trainer::set_nu()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t nu: " << nu
+ );
+
+ _nu = nu;
+ }
+
+ std::string get_random_seed (
+ ) const { return rnd.get_seed(); }
+ void set_random_seed (
+ const std::string& seed
+ ) { rnd.set_seed(seed); }
+
+ unsigned long get_oversampling_amount (
+ ) const { return _oversampling_amount; }
+ void set_oversampling_amount (
+ unsigned long amount
+ )
+ {
+ DLIB_CASSERT(amount > 0,
+ "\t void shape_predictor_trainer::set_oversampling_amount()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t amount: " << amount
+ );
+
+ _oversampling_amount = amount;
+ }
+
+ unsigned long get_feature_pool_size (
+ ) const { return _feature_pool_size; }
+ void set_feature_pool_size (
+ unsigned long size
+ )
+ {
+ DLIB_CASSERT(size > 1,
+ "\t void shape_predictor_trainer::set_feature_pool_size()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t size: " << size
+ );
+
+ _feature_pool_size = size;
+ }
+
+ double get_lambda (
+ ) const { return _lambda; }
+ void set_lambda (
+ double lambda
+ )
+ {
+ DLIB_CASSERT(lambda > 0,
+ "\t void shape_predictor_trainer::set_lambda()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t lambda: " << lambda
+ );
+
+ _lambda = lambda;
+ }
+
+ unsigned long get_num_test_splits (
+ ) const { return _num_test_splits; }
+ void set_num_test_splits (
+ unsigned long num
+ )
+ {
+ DLIB_CASSERT(num > 0,
+ "\t void shape_predictor_trainer::set_num_test_splits()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t num: " << num
+ );
+
+ _num_test_splits = num;
+ }
+
+ void set_padding_mode (
+ padding_mode_t mode
+ )
+ {
+ _padding_mode = mode;
+ }
+
+ padding_mode_t get_padding_mode (
+ ) const { return _padding_mode; }
+
+ double get_feature_pool_region_padding (
+ ) const { return _feature_pool_region_padding; }
+ void set_feature_pool_region_padding (
+ double padding
+ )
+ {
+ DLIB_CASSERT(padding > -0.5,
+ "\t void shape_predictor_trainer::set_feature_pool_region_padding()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t padding: " << padding
+ );
+
+ _feature_pool_region_padding = padding;
+ }
+
+ void be_verbose (
+ )
+ {
+ _verbose = true;
+ }
+
+ void be_quiet (
+ )
+ {
+ _verbose = false;
+ }
+
+ unsigned long get_num_threads (
+ ) const { return _num_threads; }
+ void set_num_threads (
+ unsigned long num
+ )
+ {
+ _num_threads = num;
+ }
+
+ template <typename image_array>
+ shape_predictor train (
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects
+ ) const
+ {
+ using namespace impl;
+ DLIB_CASSERT(images.size() == objects.size() && images.size() > 0,
+ "\t shape_predictor shape_predictor_trainer::train()"
+ << "\n\t Invalid inputs were given to this function. "
+ << "\n\t images.size(): " << images.size()
+ << "\n\t objects.size(): " << objects.size()
+ );
+ // make sure the objects agree on the number of parts and that there is at
+ // least one full_object_detection.
+ unsigned long num_parts = 0;
+ std::vector<int> part_present;
+ for (unsigned long i = 0; i < objects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < objects[i].size(); ++j)
+ {
+ if (num_parts == 0)
+ {
+ num_parts = objects[i][j].num_parts();
+ DLIB_CASSERT(objects[i][j].num_parts() != 0,
+ "\t shape_predictor shape_predictor_trainer::train()"
+ << "\n\t You can't give objects that don't have any parts to the trainer."
+ );
+ part_present.resize(num_parts);
+ }
+ else
+ {
+ DLIB_CASSERT(objects[i][j].num_parts() == num_parts,
+ "\t shape_predictor shape_predictor_trainer::train()"
+ << "\n\t All the objects must agree on the number of parts. "
+ << "\n\t objects["<<i<<"]["<<j<<"].num_parts(): " << objects[i][j].num_parts()
+ << "\n\t num_parts: " << num_parts
+ );
+ }
+ for (unsigned long p = 0; p < objects[i][j].num_parts(); ++p)
+ {
+ if (objects[i][j].part(p) != OBJECT_PART_NOT_PRESENT)
+ part_present[p] = 1;
+ }
+ }
+ }
+ DLIB_CASSERT(num_parts != 0,
+ "\t shape_predictor shape_predictor_trainer::train()"
+ << "\n\t You must give at least one full_object_detection if you want to train a shape model and it must have parts."
+ );
+ DLIB_CASSERT(sum(mat(part_present)) == (long)num_parts,
+ "\t shape_predictor shape_predictor_trainer::train()"
+ << "\n\t Each part must appear at least once in this training data. That is, "
+ << "\n\t you can't have a part that is always set to OBJECT_PART_NOT_PRESENT."
+ );
+
+ // Create the thread pool. If num_threads <= 1, the trainer does all the work in the calling thread.
+ thread_pool tp(_num_threads > 1 ? _num_threads : 0);
+
+ // Determine the feature type from the image type: feature_type is the underlying scalar pixel type.
+ typedef typename std::remove_const<typename std::remove_reference<decltype(images[0])>::type>::type image_type;
+ typedef typename image_traits<image_type>::pixel_type pixel_type;
+ typedef typename pixel_traits<pixel_type>::basic_pixel_type feature_type;
+
+ rnd.set_seed(get_random_seed());
+
+ std::vector<training_sample<feature_type>> samples;
+ const matrix<float,0,1> initial_shape = populate_training_sample_shapes(objects, samples);
+ const std::vector<std::vector<dlib::vector<float,2> > > pixel_coordinates = randomly_sample_pixel_coordinates(initial_shape);
+
+ unsigned long trees_fit_so_far = 0;
+ console_progress_indicator pbar(get_cascade_depth()*get_num_trees_per_cascade_level());
+ if (_verbose)
+ std::cout << "Fitting trees..." << std::endl;
+
+ std::vector<std::vector<impl::regression_tree> > forests(get_cascade_depth());
+ // Now start doing the actual training by filling in the forests
+ for (unsigned long cascade = 0; cascade < get_cascade_depth(); ++cascade)
+ {
+ // Each cascade uses a different set of pixels for its features. We compute
+ // their representations relative to the initial shape first.
+ std::vector<unsigned long> anchor_idx;
+ std::vector<dlib::vector<float,2> > deltas;
+ create_shape_relative_encoding(initial_shape, pixel_coordinates[cascade], anchor_idx, deltas);
+
+ // First compute the feature_pixel_values for each training sample at this
+ // level of the cascade.
+ parallel_for(tp, 0, samples.size(), [&](unsigned long i)
+ {
+ impl::extract_feature_pixel_values(images[samples[i].image_idx], samples[i].rect,
+ samples[i].current_shape, initial_shape, anchor_idx,
+ deltas, samples[i].feature_pixel_values);
+ }, 1);
+
+ // Now start building the trees at this cascade level.
+ for (unsigned long i = 0; i < get_num_trees_per_cascade_level(); ++i)
+ {
+ forests[cascade].push_back(make_regression_tree(tp, samples, pixel_coordinates[cascade]));
+
+ if (_verbose)
+ {
+ ++trees_fit_so_far;
+ pbar.print_status(trees_fit_so_far);
+ }
+ }
+ }
+
+ if (_verbose)
+ std::cout << "Training complete" << std::endl;
+
+ return shape_predictor(initial_shape, forests, pixel_coordinates);
+ }
+
+ private:
+
+ static void object_to_shape (
+ const full_object_detection& obj,
+ matrix<float,0,1>& shape,
+ matrix<float,0,1>& present // a mask telling which elements of #shape are present.
+ )
+ {
+ shape.set_size(obj.num_parts()*2);
+ present.set_size(obj.num_parts()*2);
+ const point_transform_affine tform_from_img = impl::normalizing_tform(obj.get_rect());
+ for (unsigned long i = 0; i < obj.num_parts(); ++i)
+ {
+ if (obj.part(i) != OBJECT_PART_NOT_PRESENT)
+ {
+ vector<float,2> p = tform_from_img(obj.part(i));
+ shape(2*i) = p.x();
+ shape(2*i+1) = p.y();
+ present(2*i) = 1;
+ present(2*i+1) = 1;
+
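+ // Shape coordinates here are relative to the bounding box, which is mapped to
+ // roughly the unit square, so a length this large means the annotated part is
+ // nowhere near its box.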
+ if (length(p) > 100)
+ {
+ std::cout << "Warning, one of your objects has parts that are way outside its bounding box! This is probably an error in your annotation." << std::endl;
+ }
+ }
+ else
+ {
+ shape(2*i) = 0;
+ shape(2*i+1) = 0;
+ present(2*i) = 0;
+ present(2*i+1) = 0;
+ }
+ }
+ }
+
+ template<typename feature_type>
+ struct training_sample
+ {
+ /*!
+
+ CONVENTION
+ - feature_pixel_values.size() == get_feature_pool_size()
+ - feature_pixel_values[j] == the value of the j-th feature pool
+ pixel when you look it up relative to the shape in current_shape.
+
+ - target_shape == The truth shape. Stays constant during the whole
+ training process (except for the parts that are not present, those are
+ always equal to the current_shape values).
+ - present == 0/1 mask saying which parts of target_shape are present.
+ - rect == the position of the object in the image_idx-th image. All shape
+ coordinates are coded relative to this rectangle.
+ - diff_shape == a temporary value holding the difference between the
+ current shape and the target shape
+ !*/
+
+ unsigned long image_idx;
+ rectangle rect;
+ matrix<float,0,1> target_shape;
+ matrix<float,0,1> present;
+
+ matrix<float,0,1> current_shape;
+ matrix<float,0,1> diff_shape;
+ std::vector<feature_type> feature_pixel_values;
+
+ void swap(training_sample& item)
+ {
+ std::swap(image_idx, item.image_idx);
+ std::swap(rect, item.rect);
+ target_shape.swap(item.target_shape);
+ present.swap(item.present);
+ current_shape.swap(item.current_shape);
+ diff_shape.swap(item.diff_shape);
+ feature_pixel_values.swap(item.feature_pixel_values);
+ }
+ };
+
+ template<typename feature_type>
+ impl::regression_tree make_regression_tree (
+ thread_pool& tp,
+ std::vector<training_sample<feature_type>>& samples,
+ const std::vector<dlib::vector<float,2> >& pixel_coordinates
+ ) const
+ {
+ using namespace impl;
+ std::deque<std::pair<unsigned long, unsigned long> > parts;
+ parts.push_back(std::make_pair(0, (unsigned long)samples.size()));
+
+ impl::regression_tree tree;
+
+ // walk the tree in breadth first order
+ const unsigned long num_split_nodes = static_cast<unsigned long>(std::pow(2.0, (double)get_tree_depth())-1);
+ std::vector<matrix<float,0,1> > sums(num_split_nodes*2+1);
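+ // sums is laid out as an implicit binary heap: entry i holds the sum of
+ // diff_shape over the samples that reach node i, with node i's children at
+ // left_child(i) and right_child(i). The final num_split_nodes+1 entries
+ // correspond to the leaves.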
+ if (tp.num_threads_in_pool() > 1)
+ {
+ // Here we need to compute the shape differences and accumulate their sum
+ // into sums[0]. We split the samples into blocks; each block is processed
+ // by a separate thread and the partial sum of each block is stored in its
+ // own slot of block_sums.
+
+ const unsigned long num_workers = std::max(1UL, tp.num_threads_in_pool());
+ const unsigned long num = samples.size();
+ const unsigned long block_size = std::max(1UL, (num + num_workers - 1) / num_workers);
+ std::vector<matrix<float,0,1> > block_sums(num_workers);
+
+ parallel_for(tp, 0, num_workers, [&](unsigned long block)
+ {
+ const unsigned long block_begin = block * block_size;
+ const unsigned long block_end = std::min(num, block_begin + block_size);
+ for (unsigned long i = block_begin; i < block_end; ++i)
+ {
+ samples[i].diff_shape = samples[i].target_shape - samples[i].current_shape;
+ block_sums[block] += samples[i].diff_shape;
+ }
+ }, 1);
+
+ // now calculate the total result from separate blocks
+ for (unsigned long i = 0; i < block_sums.size(); ++i)
+ sums[0] += block_sums[i];
+ }
+ else
+ {
+ // synchronous implementation
+ for (unsigned long i = 0; i < samples.size(); ++i)
+ {
+ samples[i].diff_shape = samples[i].target_shape - samples[i].current_shape;
+ sums[0] += samples[i].diff_shape;
+ }
+ }
+
+ for (unsigned long i = 0; i < num_split_nodes; ++i)
+ {
+ std::pair<unsigned long,unsigned long> range = parts.front();
+ parts.pop_front();
+
+ const impl::split_feature split = generate_split(tp, samples, range.first,
+ range.second, pixel_coordinates, sums[i], sums[left_child(i)],
+ sums[right_child(i)]);
+ tree.splits.push_back(split);
+ const unsigned long mid = partition_samples(split, samples, range.first, range.second);
+
+ parts.push_back(std::make_pair(range.first, mid));
+ parts.push_back(std::make_pair(mid, range.second));
+ }
+
+ // Now all the parts contain the ranges for the leaves so we can use them to
+ // compute the average leaf values.
+ matrix<float,0,1> present_counts(samples[0].target_shape.size());
+ tree.leaf_values.resize(parts.size());
+ for (unsigned long i = 0; i < parts.size(); ++i)
+ {
+ // Get the present counts for each dimension so we can divide each
+ // dimension by the number of observations we have on it to find the mean
+ // displacement in each leaf.
+ present_counts = 0;
+ for (unsigned long j = parts[i].first; j < parts[i].second; ++j)
+ present_counts += samples[j].present;
+ present_counts = dlib::reciprocal(present_counts);
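+ // Note: dlib::reciprocal() maps 0 to 0, so a dimension that was never
+ // observed in this leaf simply gets a zero update below.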
+
+ if (parts[i].second != parts[i].first)
+ tree.leaf_values[i] = pointwise_multiply(present_counts,sums[num_split_nodes+i]*get_nu());
+ else
+ tree.leaf_values[i] = zeros_matrix(samples[0].target_shape);
+
+ // now adjust the current shape based on these predictions
+ parallel_for(tp, parts[i].first, parts[i].second, [&](unsigned long j)
+ {
+ samples[j].current_shape += tree.leaf_values[i];
+ // For parts that aren't present in the training data, we just make
+ // sure that the target shape always matches and therefore gives zero
+ // error. So this makes the algorithm simply ignore non-present
+ // landmarks.
+ for (long k = 0; k < samples[j].present.size(); ++k)
+ {
+ // if this part is not present
+ if (samples[j].present(k) == 0)
+ samples[j].target_shape(k) = samples[j].current_shape(k);
+ }
+ }, 1);
+ }
+
+ return tree;
+ }
+
+ impl::split_feature randomly_generate_split_feature (
+ const std::vector<dlib::vector<float,2> >& pixel_coordinates
+ ) const
+ {
+ const double lambda = get_lambda();
+ impl::split_feature feat;
+ const size_t max_iters = get_feature_pool_size()*get_feature_pool_size();
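+ // Rejection-sample a pair of distinct feature pool pixels, preferring pairs
+ // that are close together: a candidate pair at distance dist is accepted
+ // with probability exp(-dist/lambda).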
+ for (size_t i = 0; i < max_iters; ++i)
+ {
+ feat.idx1 = rnd.get_integer(get_feature_pool_size());
+ feat.idx2 = rnd.get_integer(get_feature_pool_size());
+ while (feat.idx1 == feat.idx2)
+ feat.idx2 = rnd.get_integer(get_feature_pool_size());
+ const double dist = length(pixel_coordinates[feat.idx1]-pixel_coordinates[feat.idx2]);
+ const double accept_prob = std::exp(-dist/lambda);
+ if (accept_prob > rnd.get_random_double())
+ break;
+ }
+
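+ // Sample the split threshold uniformly from [-64, 64) (get_random_double()
+ // returns a value in [0,1)).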
+ feat.thresh = (rnd.get_random_double()*256 - 128)/2.0;
+
+ return feat;
+ }
+
+ template<typename feature_type>
+ impl::split_feature generate_split (
+ thread_pool& tp,
+ const std::vector<training_sample<feature_type>>& samples,
+ unsigned long begin,
+ unsigned long end,
+ const std::vector<dlib::vector<float,2> >& pixel_coordinates,
+ const matrix<float,0,1>& sum,
+ matrix<float,0,1>& left_sum,
+ matrix<float,0,1>& right_sum
+ ) const
+ {
+ // generate a bunch of random splits and test them and return the best one.
+
+ const unsigned long num_test_splits = get_num_test_splits();
+
+ // sample the random features we test in this function
+ std::vector<impl::split_feature> feats;
+ feats.reserve(num_test_splits);
+ for (unsigned long i = 0; i < num_test_splits; ++i)
+ feats.push_back(randomly_generate_split_feature(pixel_coordinates));
+
+ std::vector<matrix<float,0,1> > left_sums(num_test_splits);
+ std::vector<unsigned long> left_cnt(num_test_splits);
+
+ const unsigned long num_workers = std::max(1UL, tp.num_threads_in_pool());
+ const unsigned long block_size = std::max(1UL, (num_test_splits + num_workers - 1) / num_workers);
+
+ // now compute the sums of vectors that go left for each feature
+ parallel_for(tp, 0, num_workers, [&](unsigned long block)
+ {
+ const unsigned long block_begin = block * block_size;
+ const unsigned long block_end = std::min(block_begin + block_size, num_test_splits);
+
+ for (unsigned long j = begin; j < end; ++j)
+ {
+ for (unsigned long i = block_begin; i < block_end; ++i)
+ {
+ if ((float)samples[j].feature_pixel_values[feats[i].idx1] - (float)samples[j].feature_pixel_values[feats[i].idx2] > feats[i].thresh)
+ {
+ left_sums[i] += samples[j].diff_shape;
+ ++left_cnt[i];
+ }
+ }
+ }
+
+ }, 1);
+
+ // now figure out which feature is the best
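+ // The score below is the standard variance-reduction criterion: with L the
+ // sum of diff_shape vectors going left and R = sum - L, maximizing
+ // |L|^2/n_left + |R|^2/n_right is equivalent to minimizing the within-node
+ // sum of squared residuals.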
+ double best_score = -1;
+ unsigned long best_feat = 0;
+ matrix<float,0,1> temp;
+ for (unsigned long i = 0; i < num_test_splits; ++i)
+ {
+ // check how well the feature splits the space.
+ double score = 0;
+ unsigned long right_cnt = end-begin-left_cnt[i];
+ if (left_cnt[i] != 0 && right_cnt != 0)
+ {
+ temp = sum - left_sums[i];
+ score = dot(left_sums[i],left_sums[i])/left_cnt[i] + dot(temp,temp)/right_cnt;
+ if (score > best_score)
+ {
+ best_score = score;
+ best_feat = i;
+ }
+ }
+ }
+
+ left_sums[best_feat].swap(left_sum);
+ if (left_sum.size() != 0)
+ {
+ right_sum = sum - left_sum;
+ }
+ else
+ {
+ right_sum = sum;
+ left_sum = zeros_matrix(sum);
+ }
+ return feats[best_feat];
+ }
+
+ template<typename feature_type>
+ unsigned long partition_samples (
+ const impl::split_feature& split,
+ std::vector<training_sample<feature_type>>& samples,
+ unsigned long begin,
+ unsigned long end
+ ) const
+ {
+ // Partition the samples according to split (similar to the partition step
+ // of quicksort) and return the midpoint. The midpoint must be computed in a
+ // way that is compatible with how we walk through the tree.
+
+ unsigned long i = begin;
+ for (unsigned long j = begin; j < end; ++j)
+ {
+ if ((float)samples[j].feature_pixel_values[split.idx1] - (float)samples[j].feature_pixel_values[split.idx2] > split.thresh)
+ {
+ samples[i].swap(samples[j]);
+ ++i;
+ }
+ }
+ return i;
+ }
+
+
+
+ template<typename feature_type>
+ matrix<float,0,1> populate_training_sample_shapes(
+ const std::vector<std::vector<full_object_detection> >& objects,
+ std::vector<training_sample<feature_type>>& samples
+ ) const
+ {
+ samples.clear();
+ matrix<float,0,1> mean_shape;
+ matrix<float,0,1> count;
+ // first fill out the target shapes
+ for (unsigned long i = 0; i < objects.size(); ++i)
+ {
+ for (unsigned long j = 0; j < objects[i].size(); ++j)
+ {
+ training_sample<feature_type> sample;
+ sample.image_idx = i;
+ sample.rect = objects[i][j].get_rect();
+ object_to_shape(objects[i][j], sample.target_shape, sample.present);
+ for (unsigned long itr = 0; itr < get_oversampling_amount(); ++itr)
+ samples.push_back(sample);
+ mean_shape += sample.target_shape;
+ count += sample.present;
+ }
+ }
+
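+ // Each coordinate of the mean shape is averaged only over the samples in
+ // which that part is present (count holds the per-coordinate totals).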
+ mean_shape = pointwise_multiply(mean_shape,reciprocal(count));
+
+ // now go pick random initial shapes
+ for (unsigned long i = 0; i < samples.size(); ++i)
+ {
+ if ((i%get_oversampling_amount()) == 0)
+ {
+ // The mean shape is what we really use as an initial shape so always
+ // include it in the training set as an example starting shape.
+ samples[i].current_shape = mean_shape;
+ }
+ else
+ {
+ samples[i].current_shape.set_size(0);
+
+ matrix<float,0,1> hits(mean_shape.size());
+ hits = 0;
+
+ int iter = 0;
+ // Pick a few samples at random and randomly average them together to
+ // make the initial shape. Note that we make sure we get at least one
+ // observation (i.e. non-OBJECT_PART_NOT_PRESENT) on each part
+ // location.
+ while(min(hits) == 0 || iter < 2)
+ {
+ ++iter;
+ const unsigned long rand_idx = rnd.get_random_32bit_number()%samples.size();
+ const double alpha = rnd.get_random_double()+0.1;
+ samples[i].current_shape += alpha*samples[rand_idx].target_shape;
+ hits += alpha*samples[rand_idx].present;
+ }
+ samples[i].current_shape = pointwise_multiply(samples[i].current_shape, reciprocal(hits));
+ }
+
+ }
+ for (unsigned long i = 0; i < samples.size(); ++i)
+ {
+ for (long k = 0; k < samples[i].present.size(); ++k)
+ {
+ // if this part is not present
+ if (samples[i].present(k) == 0)
+ samples[i].target_shape(k) = samples[i].current_shape(k);
+ }
+ }
+
+
+ return mean_shape;
+ }
+
+
+ void randomly_sample_pixel_coordinates (
+ std::vector<dlib::vector<float,2> >& pixel_coordinates,
+ const double min_x,
+ const double min_y,
+ const double max_x,
+ const double max_y
+ ) const
+ /*!
+ ensures
+ - #pixel_coordinates.size() == get_feature_pool_size()
+ - for all valid i:
+ - pixel_coordinates[i] == a point in the box defined by the min/max x/y arguments.
+ !*/
+ {
+ pixel_coordinates.resize(get_feature_pool_size());
+ for (unsigned long i = 0; i < get_feature_pool_size(); ++i)
+ {
+ pixel_coordinates[i].x() = rnd.get_random_double()*(max_x-min_x) + min_x;
+ pixel_coordinates[i].y() = rnd.get_random_double()*(max_y-min_y) + min_y;
+ }
+ }
+
+ std::vector<std::vector<dlib::vector<float,2> > > randomly_sample_pixel_coordinates (
+ const matrix<float,0,1>& initial_shape
+ ) const
+ {
+ const double padding = get_feature_pool_region_padding();
+ // Figure out the bounds on the object shapes. We will sample uniformly
+ // from this box.
+ matrix<float> temp = reshape(initial_shape, initial_shape.size()/2, 2);
+ double min_x = min(colm(temp,0));
+ double min_y = min(colm(temp,1));
+ double max_x = max(colm(temp,0));
+ double max_y = max(colm(temp,1));
+
+ if (get_padding_mode() == bounding_box_relative)
+ {
+ min_x = std::min(0.0, min_x);
+ min_y = std::min(0.0, min_y);
+ max_x = std::max(1.0, max_x);
+ max_y = std::max(1.0, max_y);
+ }
+
+ min_x -= padding;
+ min_y -= padding;
+ max_x += padding;
+ max_y += padding;
+
+ std::vector<std::vector<dlib::vector<float,2> > > pixel_coordinates;
+ pixel_coordinates.resize(get_cascade_depth());
+ for (unsigned long i = 0; i < get_cascade_depth(); ++i)
+ randomly_sample_pixel_coordinates(pixel_coordinates[i], min_x, min_y, max_x, max_y);
+ return pixel_coordinates;
+ }
+
+
+
+ mutable dlib::rand rnd;
+
+ unsigned long _cascade_depth;
+ unsigned long _tree_depth;
+ unsigned long _num_trees_per_cascade_level;
+ double _nu;
+ unsigned long _oversampling_amount;
+ unsigned long _feature_pool_size;
+ double _lambda;
+ unsigned long _num_test_splits;
+ double _feature_pool_region_padding;
+ bool _verbose;
+ unsigned long _num_threads;
+ padding_mode_t _padding_mode;
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename some_type_of_rectangle
+ >
+ image_dataset_metadata::dataset make_bounding_box_regression_training_data (
+ const image_dataset_metadata::dataset& truth,
+ const std::vector<std::vector<some_type_of_rectangle>>& detections
+ )
+ {
+ DLIB_CASSERT(truth.images.size() == detections.size(),
+ "truth.images.size(): "<< truth.images.size() <<
+ "\tdetections.size(): "<< detections.size()
+ );
+ image_dataset_metadata::dataset result = truth;
+
+ for (size_t i = 0; i < truth.images.size(); ++i)
+ {
+ result.images[i].boxes.clear();
+ for (auto truth_box : truth.images[i].boxes)
+ {
+ if (truth_box.ignore)
+ continue;
+
+ // Find the detection that best matches the current truth_box.
+ auto det = max_scoring_element(detections[i], [&truth_box](const rectangle& r) { return box_intersection_over_union(r, truth_box.rect); });
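+ // det.second is the best IoU score; keep only truth boxes the detector
+ // actually found with better than 50% overlap.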
+ if (det.second > 0.5)
+ {
+ // Remove any existing parts and replace them with the truth_box corners.
+ truth_box.parts.clear();
+ auto b = truth_box.rect;
+ truth_box.parts["left"] = (b.tl_corner()+b.bl_corner())/2;
+ truth_box.parts["right"] = (b.tr_corner()+b.br_corner())/2;
+ truth_box.parts["top"] = (b.tl_corner()+b.tr_corner())/2;
+ truth_box.parts["bottom"] = (b.bl_corner()+b.br_corner())/2;
+ truth_box.parts["middle"] = center(b);
+
+ // Now replace the bounding truth_box with the detector's bounding truth_box.
+ truth_box.rect = det.first;
+
+ result.images[i].boxes.push_back(truth_box);
+ }
+ }
+ }
+ return result;
+ }
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SHAPE_PREDICToR_TRAINER_H_
+
diff --git a/ml/dlib/dlib/image_processing/shape_predictor_trainer_abstract.h b/ml/dlib/dlib/image_processing/shape_predictor_trainer_abstract.h
new file mode 100644
index 000000000..278b97842
--- /dev/null
+++ b/ml/dlib/dlib/image_processing/shape_predictor_trainer_abstract.h
@@ -0,0 +1,418 @@
+// Copyright (C) 2014 Davis E. King (davis@dlib.net)
+// License: Boost Software License See LICENSE.txt for the full license.
+#undef DLIB_SHAPE_PREDICToR_TRAINER_ABSTRACT_H_
+#ifdef DLIB_SHAPE_PREDICToR_TRAINER_ABSTRACT_H_
+
+#include "shape_predictor_abstract.h"
+#include "../data_io/image_dataset_metadata.h"
+
+namespace dlib
+{
+
+// ----------------------------------------------------------------------------------------
+
+ class shape_predictor_trainer
+ {
+ /*!
+ WHAT THIS OBJECT REPRESENTS
+ This object is a tool for training shape_predictors based on annotated training
+ images. Its implementation uses the algorithm described in:
+ One Millisecond Face Alignment with an Ensemble of Regression Trees
+ by Vahid Kazemi and Josephine Sullivan, CVPR 2014
+
+ !*/
+
+ public:
+
+ shape_predictor_trainer (
+ );
+ /*!
+ ensures
+ - #get_cascade_depth() == 10
+ - #get_tree_depth() == 4
+ - #get_num_trees_per_cascade_level() == 500
+ - #get_nu() == 0.1
+ - #get_oversampling_amount() == 20
+ - #get_feature_pool_size() == 400
+ - #get_lambda() == 0.1
+ - #get_num_test_splits() == 20
+ - #get_feature_pool_region_padding() == 0
+ - #get_random_seed() == ""
+ - #get_num_threads() == 0
+ - #get_padding_mode() == landmark_relative
+ - This object will not be verbose
+ !*/
+
+ unsigned long get_cascade_depth (
+ ) const;
+ /*!
+ ensures
+ - returns the number of cascades created when you train a model. This
+ means that the total number of trees in the learned model is equal to
+ get_cascade_depth()*get_num_trees_per_cascade_level().
+ !*/
+
+ void set_cascade_depth (
+ unsigned long depth
+ );
+ /*!
+ requires
+ - depth > 0
+ ensures
+ - #get_cascade_depth() == depth
+ !*/
+
+ unsigned long get_tree_depth (
+ ) const;
+ /*!
+ ensures
+ - returns the depth of the trees used in the cascade. In particular, there
+ are pow(2,get_tree_depth()) leaves in each tree.
+ !*/
+
+ void set_tree_depth (
+ unsigned long depth
+ );
+ /*!
+ requires
+ - depth > 0
+ ensures
+ - #get_tree_depth() == depth
+ !*/
+
+ unsigned long get_num_trees_per_cascade_level (
+ ) const;
+ /*!
+ ensures
+ - returns the number of trees created for each cascade. This means that
+ the total number of trees in the learned model is equal to
+ get_cascade_depth()*get_num_trees_per_cascade_level().
+ !*/
+
+ void set_num_trees_per_cascade_level (
+ unsigned long num
+ );
+ /*!
+ requires
+ - num > 0
+ ensures
+ - #get_num_trees_per_cascade_level() == num
+ !*/
+
+ double get_nu (
+ ) const;
+ /*!
+ ensures
+ - returns the regularization parameter. Larger values of this parameter
+ will cause the algorithm to fit the training data better but may also
+ cause overfitting.
+ !*/
+
+ void set_nu (
+ double nu
+ );
+ /*!
+ requires
+ - 0 < nu <= 1
+ ensures
+ - #get_nu() == nu
+ !*/
+
+ std::string get_random_seed (
+ ) const;
+ /*!
+ ensures
+ - returns the random seed used by the internal random number generator.
+ Since this is a random forest style algorithm it relies on a random
+ number generator when building the trees. So each setting of the
+ random seed will produce slightly different outputs.
+ !*/
+
+ void set_random_seed (
+ const std::string& seed
+ );
+ /*!
+ ensures
+ - #get_random_seed() == seed
+ !*/
+
+ unsigned long get_oversampling_amount (
+ ) const;
+ /*!
+ ensures
+ - You give annotated images to this object as training examples. You
+ can effectively increase the amount of training data by adding in each
+ training example multiple times but with a randomly selected deformation
+ applied to it. That is what this parameter controls. That is, if you
+ supply N training samples to train() then the algorithm runs internally
+ with N*get_oversampling_amount() training samples. So the bigger this
+ parameter the better (excepting that larger values make training take
+ longer). In terms of the Kazemi paper, this parameter is the number of
+ randomly selected initial starting points sampled for each training
+ example.
+ !*/
+
+ void set_oversampling_amount (
+ unsigned long amount
+ );
+ /*!
+ requires
+ - amount > 0
+ ensures
+ - #get_oversampling_amount() == amount
+ !*/
+
+ unsigned long get_feature_pool_size (
+ ) const;
+ /*!
+ ensures
+ - At each level of the cascade we randomly sample get_feature_pool_size()
+ pixels from the image. These pixels are used to generate features for
+ the random trees. So in general larger settings of this parameter give
+ better accuracy but make the algorithm run slower.
+ !*/
+
+ void set_feature_pool_size (
+ unsigned long size
+ );
+ /*!
+ requires
+ - size > 1
+ ensures
+ - #get_feature_pool_size() == size
+ !*/
+
+ enum padding_mode_t
+ {
+ bounding_box_relative,
+ landmark_relative
+ };
+
+ padding_mode_t get_padding_mode (
+ ) const;
+ /*!
+ ensures
+ - returns the current padding mode. See get_feature_pool_region_padding()
+ for a discussion of the modes.
+ !*/
+
+ void set_padding_mode (
+ padding_mode_t mode
+ );
+ /*!
+ ensures
+ - #get_padding_mode() == mode
+ !*/
+
+ double get_feature_pool_region_padding (
+ ) const;
+ /*!
+ ensures
+ - This algorithm works by comparing the relative intensity of pairs of
+ pixels in the input image. To decide which pixels to look at, the
+ training algorithm randomly selects pixels from a box roughly centered
+ around the object of interest. We call this box the feature pool region
+ box.
+
+ Each object of interest is defined by a full_object_detection, which
+ contains a bounding box and a list of landmarks. If
+ get_padding_mode()==landmark_relative then the feature pool region box is
+ the tightest box that contains the landmarks inside the
+ full_object_detection. In this mode the full_object_detection's bounding
+ box is ignored. Otherwise, if the padding mode is bounding_box_relative
+ then the feature pool region box is the tightest box that contains BOTH
+ the landmarks and the full_object_detection's bounding box.
+
+ Additionally, you can adjust the size of the feature pool padding region
+ by setting get_feature_pool_region_padding() to some value. If
+ get_feature_pool_region_padding()==0 then the feature pool region box is
+ unmodified and defined exactly as stated above. However, you can expand
+ the size of the box by setting the padding > 0 or shrink it by setting it
+ to something < 0.
+
+ To explain this precisely, for a padding of 0 we say that the pixels are
+ sampled from a box of size 1x1. The padding value is added to each side
+ of the box. So a padding of 0.5 would cause the algorithm to sample
+ pixels from a box that was 2x2, effectively multiplying the area pixels
+ are sampled from by 4. Similarly, setting the padding to -0.2 would
+ cause it to sample from a box 0.6x0.6 in size.
+ !*/
+
+ void set_feature_pool_region_padding (
+ double padding
+ );
+ /*!
+ requires
+ - padding > -0.5
+ ensures
+ - #get_feature_pool_region_padding() == padding
+ !*/
+
+ double get_lambda (
+ ) const;
+ /*!
+ ensures
+ - To decide how to split nodes in the regression trees the algorithm looks
+ at pairs of pixels in the image. These pixel pairs are sampled randomly
+ but with a preference for selecting pixels that are near each other.
+ get_lambda() controls this "nearness" preference. In particular, smaller
+ values of get_lambda() will make the algorithm prefer to select pixels
+ close together and larger values of get_lambda() will make it care less
+ about picking nearby pixel pairs.
+
+ Note that this is the inverse of how it is defined in the Kazemi paper.
+ For this object, you should think of lambda as "the fraction of the
+ bounding box we will traverse to find a neighboring pixel". Nominally,
+ this is normalized between 0 and 1. So reasonable settings of lambda are
+ values in the range 0 < lambda < 1.
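+
+ In this implementation a candidate pixel pair at distance d (measured in
+ the normalized shape coordinate space) is accepted with probability
+ exp(-d/lambda), so smaller lambda concentrates the pairs more tightly.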
+ !*/
+
+ void set_lambda (
+ double lambda
+ );
+ /*!
+ requires
+ - lambda > 0
+ ensures
+ - #get_lambda() == lambda
+ !*/
+
+ unsigned long get_num_test_splits (
+ ) const;
+ /*!
+ ensures
+ - When generating the random trees we randomly sample get_num_test_splits()
+ possible split features at each node and pick the one that gives the best
+ split. Larger values of this parameter will usually give more accurate
+ outputs but take longer to train.
+ !*/
+
+ void set_num_test_splits (
+ unsigned long num
+ );
+ /*!
+ requires
+ - num > 0
+ ensures
+ - #get_num_test_splits() == num
+ !*/
+
+ unsigned long get_num_threads (
+ ) const;
+ /*!
+ ensures
+ - Parts of the training process can be run in parallel on CPU threads. The
+ trainer uses the parallel_for() extension and creates a thread_pool
+ internally. When get_num_threads() == 0, the trainer creates no threads
+ and all processing is done in the calling thread.
+ !*/
+
+ void set_num_threads (
+ unsigned long num
+ );
+ /*!
+ requires
+ - num >= 0
+ ensures
+ - #get_num_threads() == num
+ !*/
+
+ void be_verbose (
+ );
+ /*!
+ ensures
+ - This object will print status messages to standard out so that a
+ user can observe the progress of the algorithm.
+ !*/
+
+ void be_quiet (
+ );
+ /*!
+ ensures
+ - This object will not print anything to standard out
+ !*/
+
+ template <typename image_array>
+ shape_predictor train (
+ const image_array& images,
+ const std::vector<std::vector<full_object_detection> >& objects
+ ) const;
+ /*!
+ requires
+ - image_array is a dlib::array of image objects where each image object
+ implements the interface defined in dlib/image_processing/generic_image.h
+ - images.size() == objects.size()
+ - images.size() > 0
+ - for some i: objects[i].size() != 0
+ (i.e. there has to be at least one full_object_detection in the training set)
+ - for all valid p, there must exist i and j such that:
+ objects[i][j].part(p) != OBJECT_PART_NOT_PRESENT.
+ (i.e. You can't define a part that is always set to OBJECT_PART_NOT_PRESENT.)
+ - for all valid i,j,k,l:
+ - objects[i][j].num_parts() == objects[k][l].num_parts()
+ (i.e. all objects must agree on the number of parts)
+ - objects[i][j].num_parts() > 0
+ ensures
+ - This object will try to learn to predict the locations of an object's parts
+ based on the object bounding box (i.e. full_object_detection::get_rect())
+ and the image pixels in that box. That is, we will try to learn a
+ shape_predictor, SP, such that:
+ SP(images[i], objects[i][j].get_rect()) == objects[i][j]
+ This learned SP object is then returned.
+ - Not all parts are required to be observed for all objects. So if you
+ have training instances with missing parts then set the part positions
+ equal to OBJECT_PART_NOT_PRESENT and this algorithm will basically ignore
+ those missing parts.
+ !*/
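+
+ // A minimal usage sketch (illustrative only; the container setup and the
+ // parameter values shown here are assumptions, not requirements):
+ //
+ // dlib::array<array2d<unsigned char> > images; // your training images
+ // std::vector<std::vector<full_object_detection> > objects; // your annotations
+ //
+ // shape_predictor_trainer trainer;
+ // trainer.set_oversampling_amount(40);
+ // trainer.be_verbose();
+ // shape_predictor sp = trainer.train(images, objects);
+ // serialize("sp.dat") << sp;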
+ };
+
+// ----------------------------------------------------------------------------------------
+
+ template <
+ typename some_type_of_rectangle
+ >
+ image_dataset_metadata::dataset make_bounding_box_regression_training_data (
+ const image_dataset_metadata::dataset& truth,
+ const std::vector<std::vector<some_type_of_rectangle>>& detections
+ );
+ /*!
+ requires
+ - truth.images.size() == detections.size()
+ - some_type_of_rectangle == rectangle, drectangle, mmod_rect, or any other type
+ that is convertible to a rectangle.
+ ensures
+ - Suppose you have an object detector that can roughly locate objects in an
+ image. This means your detector draws boxes around objects, but these are
+ *rough* boxes in the sense that they aren't positioned super accurately. For
+ instance, HOG based detectors usually have a stride of 8 pixels. So the
+ positional accuracy is going to be, at best, +/-8 pixels.
+
+ If you want to get better positional accuracy one easy thing to do is train a
+ shape_predictor to give you the location of the object's box. The
+ make_bounding_box_regression_training_data() routine helps you do this by
+ creating an appropriate training dataset. It does this by taking the dataset
+ you used to train your detector (given by the truth object), and combining
+ that with the output of your detector on each image in the training dataset
+ (given by the detections object). In particular, it will create a new
+ annotated dataset where each object box is one of the rectangles from
+ detections and that object has 5 part annotations. These annotations
+ identify the sides and middle of the truth rectangle corresponding to the
+ detection rectangle. You can then take the returned dataset and train a
+ shape_predictor on it. The resulting shape_predictor can then be used to do
+ bounding box regression.
+
+ As an aside, the reason we create 5 part annotations in this way is because
+ it gives the best shape_predictor when trained. If instead you used the 4
+ corners it wouldn't work as well, due to tedious vagaries of the shape_predictor
+ training process.
+
+ - We assume that detections[i] contains object detections corresponding to
+ the image truth.images[i].
+ !*/
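+
+ // A hedged usage sketch (file names are placeholders): load the detector's
+ // training dataset, collect the detector's outputs per image, then build and
+ // save the bounding box regression dataset:
+ //
+ // image_dataset_metadata::dataset truth;
+ // image_dataset_metadata::load_image_dataset_metadata(truth, "training.xml");
+ // std::vector<std::vector<rectangle> > detections; // one vector per image
+ // // ... run your detector on each image in truth and fill detections ...
+ // auto bbr_data = make_bounding_box_regression_training_data(truth, detections);
+ // image_dataset_metadata::save_image_dataset_metadata(bbr_data, "bbr_training.xml");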
+
+// ----------------------------------------------------------------------------------------
+
+}
+
+#endif // DLIB_SHAPE_PREDICToR_TRAINER_ABSTRACT_H_
+