Merge pull request #18356 from ivashmak:update_ransac
author Maksym Ivashechkin <maksimivashechkin@gmail.com>
Tue, 6 Oct 2020 20:37:49 +0000 (22:37 +0200)
committer GitHub <noreply@github.com>
Tue, 6 Oct 2020 20:37:49 +0000 (20:37 +0000)
* update new RANSAC

* fix warning

* change gamma values table

* resolve conflict

* resolve conflict

* GammaValues as singleton

16 files changed:
modules/calib3d/include/opencv2/calib3d.hpp
modules/calib3d/src/usac.hpp
modules/calib3d/src/usac/degeneracy.cpp
modules/calib3d/src/usac/dls_solver.cpp
modules/calib3d/src/usac/essential_solver.cpp
modules/calib3d/src/usac/estimator.cpp
modules/calib3d/src/usac/fundamental_solver.cpp
modules/calib3d/src/usac/gamma_values.cpp [new file with mode: 0644]
modules/calib3d/src/usac/gamma_values.hpp [deleted file]
modules/calib3d/src/usac/homography_solver.cpp
modules/calib3d/src/usac/local_optimization.cpp
modules/calib3d/src/usac/pnp_solver.cpp
modules/calib3d/src/usac/quality.cpp
modules/calib3d/src/usac/ransac_solvers.cpp
modules/calib3d/src/usac/sampler.cpp
modules/calib3d/src/usac/utils.cpp

index d14a810..f0b8beb 100644 (file)
@@ -450,7 +450,7 @@ enum { LMEDS  = 4,  //!< least-median of squares algorithm
        USAC_FAST = 35,     //!< USAC, fast settings
        USAC_ACCURATE = 36, //!< USAC, accurate settings
        USAC_PROSAC = 37,   //!< USAC, sorted points, runs PROSAC
-       USAC_MAGSAC = 38    //!< USAC, sorted points, runs PROSAC
+       USAC_MAGSAC = 38    //!< USAC, runs MAGSAC++
      };
 
 enum SolvePnPMethod {
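
As a usage illustration (not part of this diff): these enum values are passed through the `method` parameter of the robust estimators in calib3d. The data, threshold and confidence below are made-up example values.

    #include <opencv2/calib3d.hpp>
    #include <vector>

    // Estimate a fundamental matrix with MAGSAC++ scoring (illustrative sketch).
    cv::Mat estimateF(const std::vector<cv::Point2f> &pts1,
                      const std::vector<cv::Point2f> &pts2)
    {
        cv::Mat inlier_mask;
        // cv::USAC_MAGSAC selects the USAC pipeline with MAGSAC++ model quality;
        // cv::USAC_PROSAC would additionally assume matches sorted by quality.
        return cv::findFundamentalMat(pts1, pts2, cv::USAC_MAGSAC,
                                      1.0 /* inlier threshold, px */,
                                      0.99 /* confidence */, inlier_mask);
    }
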
index f658e87..c18de92 100644 (file)
@@ -193,6 +193,26 @@ public:
     }
 };
 
+class GammaValues
+{
+    const double max_range_complete /*= 4.62*/, max_range_gamma /*= 1.52*/;
+    const int max_size_table /* = 3000 */;
+
+    std::vector<double> gamma_complete, gamma_incomplete, gamma;
+
+    GammaValues();  // use getSingleton()
+
+public:
+    static const GammaValues& getSingleton();
+
+    const std::vector<double>& getCompleteGammaValues() const;
+    const std::vector<double>& getIncompleteGammaValues() const;
+    const std::vector<double>& getGammaValues() const;
+    double getScaleOfGammaCompleteValues () const;
+    double getScaleOfGammaValues () const;
+    int getTableSize () const;
+};
+
 ////////////////////////////////////////// QUALITY ///////////////////////////////////////////
 class Quality : public Algorithm {
 public:
@@ -269,10 +289,6 @@ public:
     virtual bool isModelValid (const Mat &/*model*/, const std::vector<int> &/*sample*/) const {
         return true;
     }
-    virtual bool isModelValid (const Mat &/*model*/, const std::vector<int> &/*sample*/,
-            int /*sample_size*/) const {
-        return true;
-    }
     /*
      * Fix degenerate model.
      * Return true if model is degenerate, false - otherwise
@@ -286,7 +302,7 @@ public:
 
 class EpipolarGeometryDegeneracy : public Degeneracy {
 public:
-    static void recoverRank (Mat &model);
+    static void recoverRank (Mat &model, bool is_fundamental_mat);
     static Ptr<EpipolarGeometryDegeneracy> create (const Mat &points_, int sample_size_);
 };
 
@@ -405,9 +421,7 @@ struct SPRT_history {
     double epsilon, delta, A;
     // number of samples processed by test
     int tested_samples; // k
-    SPRT_history ()
-        : epsilon(0), delta(0), A(0)
-    {
+    SPRT_history () {
         tested_samples = 0;
     }
 };
@@ -465,7 +479,7 @@ class GridNeighborhoodGraph : public NeighborhoodGraph {
 public:
     static Ptr<GridNeighborhoodGraph> create(const Mat &points, int points_size,
             int cell_size_x_img1_, int cell_size_y_img1_,
-            int cell_size_x_img2_, int cell_size_y_img2_);
+            int cell_size_x_img2_, int cell_size_y_img2_, int max_neighbors);
 };
 
 ////////////////////////////////////// UNIFORM SAMPLER ////////////////////////////////////////////
@@ -568,7 +582,7 @@ namespace Math {
     // return skew symmetric matrix
     Matx33d getSkewSymmetric(const Vec3d &v_);
     // eliminate matrix with m rows and n columns to be upper triangular.
-    void eliminateUpperTriangular (std::vector<double> &a, int m, int n);
+    bool eliminateUpperTriangular (std::vector<double> &a, int m, int n);
     Matx33d rotVec2RotMat (const Vec3d &v);
     Vec3d rotMat2RotVec (const Matx33d &R);
 }
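
eliminateUpperTriangular now signals failure instead of leaving callers with a degenerate system; the 7-point, 8-point and 5-point solvers further down return 0 models when it fails. A minimal sketch of such a routine with partial pivoting and row-major storage — an illustration of the contract, not the library's implementation:

    #include <vector>
    #include <cmath>
    #include <algorithm>

    // Reduce an m x n row-major matrix `a` to upper-triangular form in place.
    // Returns false if no usable pivot is found (rank-deficient system).
    static bool eliminateUpperTriangularSketch(std::vector<double> &a, int m, int n) {
        for (int r = 0; r < m; r++) {
            // partial pivoting: pick the row with the largest entry in column r
            int pivot_row = r;
            double pivot = std::fabs(a[r*n + r]);
            for (int i = r + 1; i < m; i++)
                if (std::fabs(a[i*n + r]) > pivot) { pivot = std::fabs(a[i*n + r]); pivot_row = i; }
            if (pivot < 1e-12)
                return false;  // elimination failed
            if (pivot_row != r)
                for (int c = r; c < n; c++) std::swap(a[r*n + c], a[pivot_row*n + c]);
            // eliminate column r below the pivot
            for (int i = r + 1; i < m; i++) {
                const double k = a[i*n + r] / a[r*n + r];
                for (int c = r; c < n; c++)
                    a[i*n + c] -= k * a[r*n + c];
            }
        }
        return true;
    }
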
@@ -746,6 +760,7 @@ public:
     virtual int getLOInnerMaxIters() const = 0;
     virtual const std::vector<int> &getGridCellNumber () const = 0;
     virtual int getRandomGeneratorState () const = 0;
+    virtual int getMaxItersBeforeLO () const = 0;
 
     // setters
     virtual void setLocalOptimization (LocalOptimMethod lo_) = 0;
@@ -759,6 +774,7 @@ public:
     virtual void setLOIterations (int iters) = 0;
     virtual void setLOIterativeIters (int iters) = 0;
     virtual void setLOSampleSize (int lo_sample_size) = 0;
+    virtual void setThresholdMultiplierLO (double thr_mult) = 0;
     virtual void setRandomGeneratorState (int state) = 0;
 
     virtual void maskRequired (bool required) = 0;
index 75711e8..7963ef3 100644 (file)
@@ -18,17 +18,11 @@ public:
      * Apply the oriented constraint to verify whether the epipolar geometry places points in front of or behind the camera.
      * Return: true if all points are in front of the camera w.r.t. the tested epipolar geometry - satisfies the constraint.
      *         false - otherwise.
-     */
-    inline bool isModelValid(const Mat &F, const std::vector<int> &sample) const override {
-        return isModelValid(F, sample, min_sample_size);
-    }
-
-    /* Oriented constraint:
      * x'^T F x = 0
      * e' × x' ~+ Fx   <=>  λe' × x' = Fx, λ > 0
      * e  × x ~+ x'^T F
      */
-    inline bool isModelValid(const Mat &F_, const std::vector<int> &sample, int sample_size_) const override {
+    inline bool isModelValid(const Mat &F_, const std::vector<int> &sample) const override {
         // F is of rank 2, taking cross product of two rows we obtain null vector of F
         Vec3d ec_mat = F_.row(0).cross(F_.row(2));
         auto * ec = ec_mat.val; // of size 3x1
@@ -40,7 +34,6 @@ public:
             ec_mat = F_.row(1).cross(F_.row(2));
             ec = ec_mat.val;
         }
-        // F is 9x1 row-major ordered F matrix. ec is 3x1
         const auto * const F = (double *) F_.data;
 
         // without loss of generality, let the first point in sample be in front of the camera.
@@ -50,17 +43,12 @@ public:
         // sign1 = s1 * s2
         const double sign1 = (F[0]*points[pt+2]+F[3]*points[pt+3]+F[6])*(ec[1]-ec[2]*points[pt+1]);
 
-        int num_pts_behind = 0;
-        for (int i = 1; i < sample_size_; i++) {
+        for (int i = 1; i < min_sample_size; i++) {
             pt = 4 * sample[i];
             // if signum of the first point and tested point differs
             // then two points are on different sides of the camera.
             if (sign1*(F[0]*points[pt+2]+F[3]*points[pt+3]+F[6])*(ec[1]-ec[2]*points[pt+1])<0)
-                // if 3 points are behind the camera for non-minimal sample then model is
-                // not valid. Testing by one point as in case for minimal sample is not very
-                // precise. The number 3 was chosen experimentally.
-                if (min_sample_size == sample_size_ || ++num_pts_behind >= 3)
-                    return false;
+                return false;
         }
         return true;
     }
@@ -69,15 +57,20 @@ public:
         return makePtr<EpipolarGeometryDegeneracyImpl>(*points_mat, min_sample_size);
     }
 };
-void EpipolarGeometryDegeneracy::recoverRank (Mat &model) {
+void EpipolarGeometryDegeneracy::recoverRank (Mat &model, bool is_fundamental_mat) {
     /*
      * Do singular value decomposition.
      * Set the last (smallest) singular value of the diagonal matrix to zero.
      */
     Matx33d U, Vt;
     Vec3d w;
-    SVD::compute(model, w, U, Vt, SVD::FULL_UV + SVD::MODIFY_A);
-    model = Mat(U * Matx33d(w(0), 0, 0, 0, w(1), 0, 0, 0, 0) * Vt);
+    SVD::compute(model, w, U, Vt, SVD::MODIFY_A);
+    if (is_fundamental_mat)
+        model = Mat(U * Matx33d(w(0), 0, 0, 0, w(1), 0, 0, 0, 0) * Vt);
+    else {
+        const double mean_singular_val = (w[0] + w[1]) * 0.5;
+        model = Mat(U * Matx33d(mean_singular_val, 0, 0, 0, mean_singular_val, 0, 0, 0, 0) * Vt);
+    }
 }
 Ptr<EpipolarGeometryDegeneracy> EpipolarGeometryDegeneracy::create (const Mat &points_,
         int sample_size_) {
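
For reference, the two projections applied by recoverRank above: given the SVD $M = U\,\mathrm{diag}(\sigma_1,\sigma_2,\sigma_3)\,V^\top$ with $\sigma_1 \ge \sigma_2 \ge \sigma_3$,

    \hat{F} = U \,\mathrm{diag}(\sigma_1, \sigma_2, 0)\, V^\top,
    \qquad
    \hat{E} = U \,\mathrm{diag}\!\left(\tfrac{\sigma_1+\sigma_2}{2}, \tfrac{\sigma_1+\sigma_2}{2}, 0\right) V^\top,

i.e. a fundamental matrix only needs its smallest singular value zeroed, while an essential matrix must additionally have two equal non-zero singular values.
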
@@ -157,11 +150,15 @@ private:
     const float * const points;
     const Mat * points_mat;
     const Ptr<ReprojectionErrorForward> h_reproj_error;
+    Ptr<HomographyNonMinimalSolver> h_non_min_solver;
     const EpipolarGeometryDegeneracyImpl ep_deg;
     // threshold to find inliers for homography model
     const double homography_threshold, log_conf = log(0.05);
     // points (1-7) to verify in sample
     std::vector<std::vector<int>> h_sample {{0,1,2},{3,4,5},{0,1,6},{3,4,6},{2,5,6}};
+    std::vector<int> h_inliers;
+    std::vector<double> weights;
+    std::vector<Mat> h_models;
     const int points_size, sample_size;
 public:
 
@@ -179,14 +176,12 @@ public:
             h_sample.emplace_back(std::vector<int>{3, 6, 7});
             h_sample.emplace_back(std::vector<int>{2, 4, 7});
         }
+        h_inliers = std::vector<int>(points_size);
+        h_non_min_solver = HomographyNonMinimalSolver::create(points_);
     }
     inline bool isModelValid(const Mat &F, const std::vector<int> &sample) const override {
         return ep_deg.isModelValid(F, sample);
     }
-    inline bool isModelValid(const Mat &F, const std::vector<int> &sample, int sample_size_) const override {
-        return ep_deg.isModelValid(F, sample, sample_size_);
-    }
-
     bool recoverIfDegenerate (const std::vector<int> &sample, const Mat &F_best,
                  Mat &non_degenerate_model, Score &non_degenerate_model_score) override {
         non_degenerate_model_score = Score(); // set worst case
@@ -239,23 +234,32 @@ public:
             }
 
             // compute H
-            const Matx33d H = A - e_prime * (M.inv() * b).t();
+            Matx33d H = A - e_prime * (M.inv() * b).t();
 
-            int inliers_on_plane = 0;
+            int inliers_out_plane = 0;
             h_reproj_error->setModelParameters(Mat(H));
 
             // find inliers from sample, points related to H, x' ~ Hx
             for (int s = 0; s < sample_size; s++)
-                if (h_reproj_error->getError(sample[s]) < homography_threshold)
-                    if (++inliers_on_plane >= 5)
+                if (h_reproj_error->getError(sample[s]) > homography_threshold)
+                    if (++inliers_out_plane > 2)
                         break;
 
             // if there are at least 5 points lying on plane then F is degenerate
-            if (inliers_on_plane >= 5) {
+            if (inliers_out_plane <= 2) {
                 is_model_degenerate = true;
 
+                // update homography by polishing on all inliers
+                int h_inls_cnt = 0;
+                const auto &h_errors = h_reproj_error->getErrors(Mat(H));
+                for (int pt = 0; pt < points_size; pt++)
+                    if (h_errors[pt] < homography_threshold)
+                        h_inliers[h_inls_cnt++] = pt;
+                if (h_non_min_solver->estimate(h_inliers, h_inls_cnt, h_models, weights) != 0)
+                    H = Matx33d(h_models[0]);
+
                 Mat newF;
-                const Score newF_score = planeAndParallaxRANSAC(H, newF);
+                const Score newF_score = planeAndParallaxRANSAC(H, newF, h_errors);
                 if (newF_score.isBetter(non_degenerate_model_score)) {
                     // store non degenerate model
                     non_degenerate_model_score = newF_score;
@@ -271,7 +275,7 @@ public:
     }
 private:
     // RANSAC with plane-and-parallax to find new Fundamental matrix
-    Score planeAndParallaxRANSAC (const Matx33d &H, Mat &best_F) {
+    Score planeAndParallaxRANSAC (const Matx33d &H, Mat &best_F, const std::vector<float> &h_errors) {
         int max_iters = 100; // with 95% confidence assume at least 17% of inliers
         Score best_score;
         for (int iters = 0; iters < max_iters; iters++) {
@@ -282,18 +286,17 @@ private:
                 h_outlier2 = rng.uniform(0, points_size);
 
             // find outliers of homography H
-            if (h_reproj_error->getError(h_outlier1) > homography_threshold &&
-                h_reproj_error->getError(h_outlier2) > homography_threshold) {
+            if (h_errors[h_outlier1] > homography_threshold &&
+                h_errors[h_outlier2] > homography_threshold) {
 
                 // do plane and parallax with outliers of H
-                const Vec3d pt1 (points[4*h_outlier1], points[4*h_outlier1+1], 1);
-                const Vec3d pt2 (points[4*h_outlier2], points[4*h_outlier2+1], 1);
-                const Vec3d pt1_prime (points[4*h_outlier1+2],points[4*h_outlier1+3],1);
-                const Vec3d pt2_prime (points[4*h_outlier2+2],points[4*h_outlier2+3],1);
-
                 // F = [(p1' x Hp1) x (p2' x Hp2)]_x H
-                const Matx33d F = Math::getSkewSymmetric((pt1_prime.cross(H * pt1)).cross
-                                                         (pt2_prime.cross(H * pt2))) * H;
+                const Matx33d F = Math::getSkewSymmetric(
+                       (Vec3d(points[4*h_outlier1+2], points[4*h_outlier1+3], 1).cross   // p1'
+                   (H * Vec3d(points[4*h_outlier1  ], points[4*h_outlier1+1], 1))).cross // Hp1
+                       (Vec3d(points[4*h_outlier2+2], points[4*h_outlier2+3], 1).cross   // p2'
+                   (H * Vec3d(points[4*h_outlier2  ], points[4*h_outlier2+1], 1)))       // Hp2
+                 ) * H;
 
                 const Score score = quality->getScore(Mat(F));
                 if (score.isBetter(best_score)) {
index 0bffc6c..0898734 100644 (file)
@@ -214,7 +214,6 @@ public:
 
             if (all_points_in_front_of_camera) {
                 Mat model;
-//                hconcat(rot_mat, soln_translation, model);
                 hconcat(Math::rotVec2RotMat(Math::rotMat2RotVec(rot_mat)), soln_translation, model);
                 models_.emplace_back(K * model);
             }
index a2e2467..0adca09 100644 (file)
@@ -54,7 +54,8 @@ public:
         const int num_cols = 9, num_e_mat = 4;
         double ee[36]; // 9*4
         // eliminate linear equations
-        Math::eliminateUpperTriangular(coefficients, 5, num_cols);
+        if (!Math::eliminateUpperTriangular(coefficients, 5, num_cols))
+            return 0;
         for (int i = 0; i < num_e_mat; i++)
             for (int j = 5; j < num_cols; j++)
                 ee[num_cols * i + j] = (i + 5 == j) ? 1 : 0;
@@ -244,25 +245,91 @@ Ptr<EssentialMinimalSolverStewenius5pts> EssentialMinimalSolverStewenius5pts::cr
 class EssentialNonMinimalSolverImpl : public EssentialNonMinimalSolver {
 private:
     const Mat * points_mat;
-    const Ptr<FundamentalNonMinimalSolver> non_min_fundamental;
+    const float * const points;
 public:
     /*
      * Input calibrated points K^-1 x.
      * Linear 8 points algorithm is used for estimation.
      */
     explicit EssentialNonMinimalSolverImpl (const Mat &points_) :
-        points_mat(&points_), non_min_fundamental(FundamentalNonMinimalSolver::create(points_)) {}
+        points_mat(&points_), points ((float *) points_.data) {}
 
     int estimate (const std::vector<int> &sample, int sample_size, std::vector<Mat>
-        &models, const std::vector<double> &weights) const override {
-        return non_min_fundamental->estimate(sample, sample_size, models, weights);
-    }
-    int getMinimumRequiredSampleSize() const override {
-        return non_min_fundamental->getMinimumRequiredSampleSize();
-    }
-    int getMaxNumberOfSolutions () const override {
-        return non_min_fundamental->getMaxNumberOfSolutions();
+            &models, const std::vector<double> &weights) const override {
+        if (sample_size < getMinimumRequiredSampleSize())
+            return 0;
+
+        // ------- 8 points algorithm with Eigen and covariance matrix --------------
+        double a[9] = {0, 0, 0, 0, 0, 0, 0, 0, 1};
+        double AtA[81] = {0}; // 9x9
+
+        if (weights.empty()) {
+            for (int i = 0; i < sample_size; i++) {
+                const int pidx = 4*sample[i];
+                const double x1 = points[pidx  ], y1 = points[pidx+1],
+                             x2 = points[pidx+2], y2 = points[pidx+3];
+                a[0] = x2*x1;
+                a[1] = x2*y1;
+                a[2] = x2;
+                a[3] = y2*x1;
+                a[4] = y2*y1;
+                a[5] = y2;
+                a[6] = x1;
+                a[7] = y1;
+
+                // calculate covariance for eigen
+                for (int row = 0; row < 9; row++)
+                    for (int col = row; col < 9; col++)
+                        AtA[row*9+col] += a[row]*a[col];
+            }
+        } else {
+            for (int i = 0; i < sample_size; i++) {
+                const int smpl = 4*sample[i];
+                const double weight = weights[i];
+                const double x1 = points[smpl  ], y1 = points[smpl+1],
+                             x2 = points[smpl+2], y2 = points[smpl+3];
+                const double weight_times_x2 = weight * x2,
+                             weight_times_y2 = weight * y2;
+
+                a[0] = weight_times_x2 * x1;
+                a[1] = weight_times_x2 * y1;
+                a[2] = weight_times_x2;
+                a[3] = weight_times_y2 * x1;
+                a[4] = weight_times_y2 * y1;
+                a[5] = weight_times_y2;
+                a[6] = weight * x1;
+                a[7] = weight * y1;
+                a[8] = weight;
+
+                // calculate covariance for eigen
+                for (int row = 0; row < 9; row++)
+                    for (int col = row; col < 9; col++)
+                        AtA[row*9+col] += a[row]*a[col];
+            }
+        }
+
+        // copy symmetric part of covariance matrix
+        for (int j = 1; j < 9; j++)
+            for (int z = 0; z < j; z++)
+                AtA[j*9+z] = AtA[z*9+j];
+
+#ifdef HAVE_EIGEN
+        models = std::vector<Mat>{ Mat_<double>(3,3) };
+        const Eigen::JacobiSVD<Eigen::Matrix<double, 9, 9>> svd((Eigen::Matrix<double, 9, 9>(AtA)),
+                Eigen::ComputeFullV);
+        // extract the last nullspace
+        Eigen::Map<Eigen::Matrix<double, 9, 1>>((double *)models[0].data) = svd.matrixV().col(8);
+#else
+        Matx<double, 9, 9> AtA_(AtA), U, Vt;
+        Vec<double, 9> W;
+        SVD::compute(AtA_, W, U, Vt, SVD::FULL_UV + SVD::MODIFY_A);
+        models = std::vector<Mat> { Mat_<double>(3, 3, Vt.val + 72 /*=8*9*/) };
+#endif
+        FundamentalDegeneracy::recoverRank(models[0], false /*E*/);
+        return 1;
     }
+    int getMinimumRequiredSampleSize() const override { return 8; }
+    int getMaxNumberOfSolutions () const override { return 1; }
     Ptr<NonMinimalSolver> clone () const override {
         return makePtr<EssentialNonMinimalSolverImpl>(*points_mat);
     }
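
For context, the least-squares formulation behind the accumulated covariance matrix AtA above: each calibrated correspondence $(x_1, y_1) \leftrightarrow (x_2, y_2)$ contributes a row $a = (x_2 x_1,\; x_2 y_1,\; x_2,\; y_2 x_1,\; y_2 y_1,\; y_2,\; x_1,\; y_1,\; 1)$ of the epipolar constraint $a^\top e = 0$ (optionally scaled by its weight), where $e$ is the vectorized essential matrix. Stacking the rows into $A$, the solver computes

    \min_{\|e\| = 1} \|A e\|^2 = \min_{\|e\| = 1} e^\top (A^\top A)\, e,

whose minimizer is the eigenvector of the $9 \times 9$ matrix $A^\top A$ with the smallest eigenvalue — the last column of $V$ (last row of $V^\top$) in the SVDs used above. The resulting $3 \times 3$ matrix is then projected onto the essential-matrix manifold by recoverRank.
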
index 46c9d4c..91abe30 100644 (file)
@@ -69,13 +69,7 @@ public:
     }
     int estimateModelNonMinimalSample(const std::vector<int> &sample, int sample_size,
             std::vector<Mat> &models, const std::vector<double> &weights) const override {
-        std::vector<Mat> Fs;
-        const int num_est_models = non_min_solver->estimate(sample, sample_size, Fs, weights);
-        int valid_models_count = 0;
-        for (int i = 0; i < num_est_models; i++)
-            if (degeneracy->isModelValid (Fs[i], sample, sample_size))
-                models[valid_models_count++] = Fs[i];
-        return valid_models_count;
+        return non_min_solver->estimate(sample, sample_size, models, weights);
     }
     int getMaxNumSolutions () const override {
         return min_solver->getMaxNumberOfSolutions();
@@ -123,13 +117,7 @@ public:
 
     int estimateModelNonMinimalSample(const std::vector<int> &sample, int sample_size,
             std::vector<Mat> &models, const std::vector<double> &weights) const override {
-        std::vector<Mat> Es;
-        const int num_est_models = non_min_solver->estimate(sample, sample_size, Es, weights);
-        int valid_models_count = 0;
-        for (int i = 0; i < num_est_models; i++)
-            if (degeneracy->isModelValid (Es[i], sample, sample_size))
-                models[valid_models_count++] = Es[i];
-        return valid_models_count;
+        return non_min_solver->estimate(sample, sample_size, models, weights);
     };
     int getMaxNumSolutions () const override {
         return min_solver->getMaxNumberOfSolutions();
@@ -231,7 +219,7 @@ Ptr<PnPEstimator> PnPEstimator::create (const Ptr<MinimalSolver> &min_solver_,
 
 ///////////////////////////////////////////// ERROR /////////////////////////////////////////
 // Symmetric Reprojection Error
-class ReprojectedErrorSymmetricImpl : public ReprojectionErrorSymmetric {
+class ReprojectionErrorSymmetricImpl : public ReprojectionErrorSymmetric {
 private:
     const Mat * points_mat;
     const float * const points;
@@ -239,7 +227,7 @@ private:
     float minv11, minv12, minv13, minv21, minv22, minv23, minv31, minv32, minv33;
     std::vector<float> errors;
 public:
-    explicit ReprojectedErrorSymmetricImpl (const Mat &points_)
+    explicit ReprojectionErrorSymmetricImpl (const Mat &points_)
         : points_mat(&points_), points ((float *) points_.data)
         , m11(0), m12(0), m13(0), m21(0), m22(0), m23(0), m31(0), m32(0), m33(0)
         , minv11(0), minv12(0), minv13(0), minv21(0), minv22(0), minv23(0), minv31(0), minv32(0), minv33(0)
@@ -287,23 +275,23 @@ public:
         return errors;
     }
     Ptr<Error> clone () const override {
-        return makePtr<ReprojectedErrorSymmetricImpl>(*points_mat);
+        return makePtr<ReprojectionErrorSymmetricImpl>(*points_mat);
     }
 };
 Ptr<ReprojectionErrorSymmetric>
 ReprojectionErrorSymmetric::create(const Mat &points) {
-    return makePtr<ReprojectedErrorSymmetricImpl>(points);
+    return makePtr<ReprojectionErrorSymmetricImpl>(points);
 }
 
 // Forward Reprojection Error
-class ReprojectedErrorForwardImpl : public ReprojectionErrorForward {
+class ReprojectionErrorForwardImpl : public ReprojectionErrorForward {
 private:
     const Mat * points_mat;
     const float * const points;
     float m11, m12, m13, m21, m22, m23, m31, m32, m33;
     std::vector<float> errors;
 public:
-    explicit ReprojectedErrorForwardImpl (const Mat &points_)
+    explicit ReprojectionErrorForwardImpl (const Mat &points_)
         : points_mat(&points_), points ((float *)points_.data)
         , m11(0), m12(0), m13(0), m21(0), m22(0), m23(0), m31(0), m32(0), m33(0)
         , errors(points_.rows)
@@ -338,12 +326,12 @@ public:
         return errors;
     }
     Ptr<Error> clone () const override {
-        return makePtr<ReprojectedErrorForwardImpl>(*points_mat);
+        return makePtr<ReprojectionErrorForwardImpl>(*points_mat);
     }
 };
 Ptr<ReprojectionErrorForward>
 ReprojectionErrorForward::create(const Mat &points) {
-    return makePtr<ReprojectedErrorForwardImpl>(points);
+    return makePtr<ReprojectionErrorForwardImpl>(points);
 }
 
 class SampsonErrorImpl : public SampsonError {
@@ -527,7 +515,7 @@ Ptr<ReprojectionErrorPmatrix> ReprojectionErrorPmatrix::create(const Mat &points
 
 ///////////////////////////////////////////////////////////////////////////////////////////////////
 // Computes forward reprojection error for affine transformation.
-class ReprojectedDistanceAffineImpl : public ReprojectionErrorAffine {
+class ReprojectionDistanceAffineImpl : public ReprojectionErrorAffine {
 private:
     /*
      * m11 m12 m13
@@ -539,7 +527,7 @@ private:
     float m11, m12, m13, m21, m22, m23;
     std::vector<float> errors;
 public:
-    explicit ReprojectedDistanceAffineImpl (const Mat &points_)
+    explicit ReprojectionDistanceAffineImpl (const Mat &points_)
         : points_mat(&points_), points ((float *) points_.data)
         , m11(0), m12(0), m13(0), m21(0), m22(0), m23(0)
         , errors(points_.rows)
@@ -569,12 +557,12 @@ public:
         return errors;
     }
     Ptr<Error> clone () const override {
-        return makePtr<ReprojectedDistanceAffineImpl>(*points_mat);
+        return makePtr<ReprojectionDistanceAffineImpl>(*points_mat);
     }
 };
 Ptr<ReprojectionErrorAffine>
 ReprojectionErrorAffine::create(const Mat &points) {
-    return makePtr<ReprojectedDistanceAffineImpl>(points);
+    return makePtr<ReprojectionDistanceAffineImpl>(points);
 }
 
 ////////////////////////////////////// NORMALIZING TRANSFORMATION /////////////////////////
index 5048aa0..00d4feb 100644 (file)
@@ -20,7 +20,7 @@ public:
 
     int estimate (const std::vector<int> &sample, std::vector<Mat> &models) const override {
         const int m = 7, n = 9; // rows, cols
-        std::vector<double> a(m*n);
+        std::vector<double> a(63); // m*n
         auto * a_ = &a[0];
 
         for (int i = 0; i < m; i++ ) {
@@ -39,7 +39,8 @@ public:
             (*a_++) = 1;
         }
 
-        Math::eliminateUpperTriangular(a, m, n);
+        if (!Math::eliminateUpperTriangular(a, m, n))
+            return 0;
 
         /*
          [a11 a12 a13 a14 a15 a16 a17 a18 a19]
@@ -165,7 +166,7 @@ public:
 
     int estimate (const std::vector<int> &sample, std::vector<Mat> &models) const override {
         const int m = 8, n = 9; // rows, cols
-        std::vector<double> a(m*n);
+        std::vector<double> a(72); // m*n
         auto * a_ = &a[0];
 
         for (int i = 0; i < m; i++ ) {
@@ -184,7 +185,8 @@ public:
             (*a_++) = 1;
         }
 
-        Math::eliminateUpperTriangular(a, m, n);
+        if (!Math::eliminateUpperTriangular(a, m, n))
+            return 0;
 
         /*
          [a11 a12 a13 a14 a15 a16 a17 a18 a19]
@@ -313,16 +315,15 @@ public:
         Matx<double, 9, 9> AtA_(AtA), U, Vt;
         Vec<double, 9> W;
         SVD::compute(AtA_, W, U, Vt, SVD::FULL_UV + SVD::MODIFY_A);
-        models = std::vector<Mat> { Mat(Vt.row(8).reshape<3,3>()) };
+        models = std::vector<Mat> { Mat_<double>(3, 3, Vt.val + 72 /*=8*9*/) };
 #endif
+        FundamentalDegeneracy::recoverRank(models[0], true/*F*/);
 
         // Transpose T2 (in T2 the lower diagonal is zero)
         T2(2, 0) = T2(0, 2); T2(2, 1) = T2(1, 2);
         T2(0, 2) = 0; T2(1, 2) = 0;
 
         models[0] = T2 * models[0] * T1;
-
-        FundamentalDegeneracy::recoverRank(models[0]);
         return 1;
     }
 
diff --git a/modules/calib3d/src/usac/gamma_values.cpp b/modules/calib3d/src/usac/gamma_values.cpp
new file mode 100644 (file)
index 0000000..1e82d8e
--- /dev/null
@@ -0,0 +1,107 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#include "../precomp.hpp"
+#include "../usac.hpp"
+
+namespace cv { namespace usac {
+
+GammaValues::GammaValues()
+    : max_range_complete(4.62)
+    , max_range_gamma(1.52)
+    , max_size_table(3000)
+{
+    /*
+     * Gamma values for n = 2 degrees of freedom and the 99% quantile of the chi distribution
+     * (square root of the chi-squared distribution), tabulated over the range <0, 4.62> for the
+     * complete values and <0, 1.52> for the gamma values.
+     * There are 50 anchor points; values in between are approximated by linear interpolation.
+     */
+    const int number_of_anchor_points = 50;
+    std::vector<double> gamma_complete_anchor = std::vector<double>
+       {1.7724538509055159, 1.182606138403832, 0.962685372890749, 0.8090013493715409,
+        0.6909325812483967, 0.5961199186942078, 0.5179833984918483, 0.45248091153099873,
+        0.39690029823142897, 0.34930995878395804, 0.3082742109224103, 0.2726914551904204,
+        0.2416954924567404, 0.21459196516027726, 0.190815580770884, 0.16990026519723456,
+        0.15145770273372564, 0.13516150988807635, 0.12073530906427948, 0.10794357255251595,
+        0.0965844793065712, 0.08648426334883624, 0.07749268706639856, 0.06947937608738222,
+        0.062330823249820304, 0.05594791865006951, 0.05024389794830681, 0.045142626552664405,
+        0.040577155977706246, 0.03648850256745103, 0.03282460924226794, 0.029539458909083157,
+        0.02659231432268328, 0.023947063970062663, 0.021571657306774475, 0.01943761564987864,
+        0.017519607407598645, 0.015795078236273064, 0.014243928262247118, 0.012848229767187478,
+        0.011591979769030827, 0.010460882783057988, 0.009442159753944173, 0.008524379737926344,
+        0.007697311406424555, 0.006951791856026042, 0.006279610558635573, 0.005673406581042374,
+        0.005126577454218803, 0.004633198286725555};
+
+    std::vector<double> gamma_incomplete_anchor = std::vector<double>
+        {0.0, 0.01773096912803939, 0.047486924846289004, 0.08265437835139826, 0.120639343491371,
+         0.15993024714868515, 0.19954558593754865, 0.23881753504915218, 0.2772830648361923,
+         0.3146208784488923, 0.3506114446939783, 0.385110056889967, 0.41802785670077697,
+         0.44931803198258047, 0.47896553567848993, 0.5069792897777948, 0.5333861945970247,
+         0.5582264802664578, 0.581550074874317, 0.6034137543595729, 0.6238789008764282,
+         0.6430097394182639, 0.6608719532994989, 0.6775316015953519, 0.6930542783709592,
+         0.7075044661695132, 0.7209450459078338, 0.733436932830201, 0.7450388140484766,
+         0.7558069678435577, 0.7657951486073097, 0.7750545242776943, 0.7836336555215403,
+         0.7915785078697124, 0.798932489600361, 0.8057365094688473, 0.8120290494534339,
+         0.8178462485678104, 0.8232219945197348, 0.8281880205973585, 0.8327740056635289,
+         0.8370076755516281, 0.8409149044990385, 0.8445198155381767, 0.8478448790000731,
+         0.8509110084798414, 0.8537376537738418, 0.8563428904304485, 0.8587435056647642,
+         0.8609550804762539};
+
+    std::vector<double> gamma_anchor = std::vector<double>
+        {1.7724538509055159, 1.427187162582056, 1.2890382454046982, 1.186244737282388,
+         1.1021938955410173, 1.0303674512016956, 0.9673796229113404, 0.9111932804012203,
+         0.8604640514722175, 0.814246149432561, 0.7718421763436497, 0.7327190195355812,
+         0.6964573670982434, 0.6627197089339725, 0.6312291454822467, 0.6017548373556638,
+         0.5741017071093776, 0.5481029597580317, 0.523614528104858, 0.5005108666212138,
+         0.478681711577816, 0.4580295473431646, 0.43846759792922513, 0.41991821541471996,
+         0.40231157253054745, 0.38558459136185, 0.3696800574963841, 0.3545458813847714,
+         0.340134477710645, 0.32640224021796493, 0.3133090943985706, 0.3008181141790485,
+         0.28889519159238314, 0.2775087506098113, 0.2666294980086962, 0.2562302054837794,
+         0.24628551826026082, 0.2367717863030556, 0.22766691488600885, 0.21895023182476064,
+         0.2106023691144937, 0.2026051570714723, 0.19494152937027823, 0.18759543761063277,
+         0.1805517742482484, 0.17379630289125447, 0.16731559510356395, 0.1610969729740903,
+         0.1551284568099053, 0.14939871739550692};
+
+    // allocate tables
+    gamma_complete = std::vector<double>(max_size_table);
+    gamma_incomplete = std::vector<double>(max_size_table);
+    gamma = std::vector<double>(max_size_table);
+
+    const int step = (int)((double)max_size_table / (number_of_anchor_points-1));
+    int arr_cnt = 0;
+    for (int i = 0; i < number_of_anchor_points-1; i++) {
+         const double complete_x0 = gamma_complete_anchor[i], step_complete = (gamma_complete_anchor[i+1] - complete_x0) / step;
+         const double incomplete_x0 = gamma_incomplete_anchor[i], step_incomplete = (gamma_incomplete_anchor[i+1] - incomplete_x0) / step;
+         const double gamma_x0 = gamma_anchor[i], step_gamma = (gamma_anchor[i+1] - gamma_x0) / step;
+
+         for (int j = 0; j < step; j++) {
+             gamma_complete[arr_cnt] = complete_x0 + j * step_complete;
+             gamma_incomplete[arr_cnt] = incomplete_x0 + j * step_incomplete;
+             gamma[arr_cnt++] = gamma_x0 + j * step_gamma;
+         }
+    }
+    if (arr_cnt < max_size_table) {
+        // if the tables were not completely filled (can happen due to integer rounding of the step) then pad with the last computed values
+        std::fill(gamma_complete.begin()+arr_cnt, gamma_complete.end(), gamma_complete[arr_cnt-1]);
+        std::fill(gamma_incomplete.begin()+arr_cnt, gamma_incomplete.end(), gamma_incomplete[arr_cnt-1]);
+        std::fill(gamma.begin()+arr_cnt, gamma.end(), gamma[arr_cnt-1]);
+    }
+}
+
+const std::vector<double>& GammaValues::getCompleteGammaValues() const { return gamma_complete; }
+const std::vector<double>& GammaValues::getIncompleteGammaValues() const { return gamma_incomplete; }
+const std::vector<double>& GammaValues::getGammaValues() const { return gamma; }
+double GammaValues::getScaleOfGammaCompleteValues () const { return gamma_complete.size() / max_range_complete; }
+double GammaValues::getScaleOfGammaValues () const { return gamma.size() / max_range_gamma; }
+int GammaValues::getTableSize () const { return max_size_table; }
+
+/* static */
+const GammaValues& GammaValues::getSingleton()
+{
+    static GammaValues g_gammaValues;
+    return g_gammaValues;
+}
+
+}}  // namespace
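
A hypothetical lookup helper showing how a consumer of GammaValues might index the precomputed table. The scaling and saturation below are assumptions derived from the accessors above (getScaleOfGammaValues, getTableSize); the actual MAGSAC++ usage lives in modules/calib3d/src/usac/quality.cpp, which is not shown in this excerpt.

    #include "../usac.hpp"

    // Map a residual-derived value onto the precomputed gamma table (sketch).
    static double lookupGamma(double value) {
        const cv::usac::GammaValues &tbl = cv::usac::GammaValues::getSingleton();
        const std::vector<double> &gamma = tbl.getGammaValues();
        // entries per unit of `value`; values beyond the tabulated range saturate
        const double scale = tbl.getScaleOfGammaValues();
        int idx = static_cast<int>(value * scale);
        if (idx >= tbl.getTableSize()) idx = tbl.getTableSize() - 1;
        if (idx < 0) idx = 0;
        return gamma[idx];
    }
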
diff --git a/modules/calib3d/src/usac/gamma_values.hpp b/modules/calib3d/src/usac/gamma_values.hpp
deleted file mode 100644 (file)
index 5a30289..0000000
+++ /dev/null
@@ -1,237 +0,0 @@
-// This file is part of OpenCV project.
-// It is subject to the license terms in the LICENSE file found in the top-level directory
-// of this distribution and at http://opencv.org/license.html.
-
-constexpr int stored_gamma_number = 2999;
-constexpr int stored_incomplete_gamma_number = 3999;
-constexpr double scale_of_stored_gammas_n4 = 1647.8;
-constexpr double scale_of_stored_incomplete_gammas_n4 = 603.64;
-
-constexpr double stored_complete_gamma_values_n4[] = {0.88623,0.88618,0.8861,0.88599,0.88587,0.88573,0.88557,0.8854,0.88522,0.88502,0.88482,0.8846,0.88438,0.88415,0.8839,0.88365,0.8834,0.88313,0.88285,0.88257,0.88229,0.88199,0.88169,0.88138,0.88107,0.88075,0.88042,0.88009,0.87975,0.87941,0.87906,0.8787,0.87834,0.87798,0.87761,0.87724,0.87686,0.87647,0.87609,0.87569,0.8753,0.8749,0.87449,0.87408,0.87367,0.87325,0.87283,0.8724,0.87197,0.87154,0.8711,0.87066,0.87022,0.86977,0.86932,0.86886,0.8684,0.86794,0.86748,0.86701,0.86654,0.86606,0.86559,0.86511,0.86462,0.86414,0.86365,0.86315,0.86266,0.86216,0.86166,0.86116,0.86065,0.86014,0.85963,0.85911,0.8586,0.85808,0.85755,0.85703,0.8565,0.85597,0.85544,0.85491,0.85437,0.85383,0.85329,0.85275,0.8522,0.85165,0.8511,0.85055,0.85,0.84944,0.84888,0.84832,0.84776,0.8472,0.84663,0.84606,
-0.84549,0.84492,0.84434,0.84377,0.84319,0.84261,0.84203,0.84145,0.84086,0.84027,0.83969,0.83909,0.8385,0.83791,0.83731,0.83672,0.83612,0.83552,0.83492,0.83431,0.83371,0.8331,0.8325,0.83189,0.83128,0.83066,0.83005,0.82943,0.82882,0.8282,0.82758,0.82696,0.82634,0.82572,0.82509,0.82447,0.82384,0.82321,0.82258,0.82195,0.82132,0.82068,0.82005,0.81941,0.81878,0.81814,0.8175,0.81686,0.81622,0.81558,0.81493,0.81429,0.81364,0.813,0.81235,0.8117,0.81105,0.8104,0.80975,0.80909,0.80844,0.80779,0.80713,0.80647,0.80582,0.80516,0.8045,0.80384,0.80318,0.80251,0.80185,0.80119,0.80052,0.79986,0.79919,0.79852,0.79786,0.79719,0.79652,0.79585,0.79518,0.7945,0.79383,0.79316,0.79248,0.79181,0.79113,0.79046,0.78978,0.7891,0.78843,0.78775,0.78707,0.78639,0.78571,0.78503,0.78434,0.78366,0.78298,0.78229,
-0.78161,0.78093,0.78024,0.77955,0.77887,0.77818,0.77749,0.7768,0.77612,0.77543,0.77474,0.77405,0.77336,0.77266,0.77197,0.77128,0.77059,0.76989,0.7692,0.76851,0.76781,0.76712,0.76642,0.76573,0.76503,0.76433,0.76364,0.76294,0.76224,0.76154,0.76085,0.76015,0.75945,0.75875,0.75805,0.75735,0.75665,0.75595,0.75525,0.75454,0.75384,0.75314,0.75244,0.75174,0.75103,0.75033,0.74963,0.74892,0.74822,0.74751,0.74681,0.74611,0.7454,0.7447,0.74399,0.74328,0.74258,0.74187,0.74117,0.74046,0.73975,0.73905,0.73834,0.73763,0.73692,0.73622,0.73551,0.7348,0.73409,0.73339,0.73268,0.73197,0.73126,0.73055,0.72984,0.72913,0.72842,0.72772,0.72701,0.7263,0.72559,0.72488,0.72417,0.72346,0.72275,0.72204,0.72133,0.72062,0.71991,0.7192,0.71849,0.71778,0.71707,0.71636,0.71565,0.71494,0.71423,0.71352,0.71281,0.71209,
-0.71138,0.71067,0.70996,0.70925,0.70854,0.70783,0.70712,0.70641,0.7057,0.70499,0.70428,0.70357,0.70286,0.70215,0.70144,0.70073,0.70002,0.69931,0.6986,0.69789,0.69718,0.69647,0.69576,0.69505,0.69434,0.69363,0.69292,0.69221,0.6915,0.69079,0.69008,0.68937,0.68867,0.68796,0.68725,0.68654,0.68583,0.68512,0.68441,0.68371,0.683,0.68229,0.68158,0.68088,0.68017,0.67946,0.67875,0.67805,0.67734,0.67663,0.67593,0.67522,0.67451,0.67381,0.6731,0.6724,0.67169,0.67099,0.67028,0.66958,0.66887,0.66817,0.66746,0.66676,0.66605,0.66535,0.66465,0.66394,0.66324,0.66254,0.66183,0.66113,0.66043,0.65973,0.65903,0.65832,0.65762,0.65692,0.65622,0.65552,0.65482,0.65412,0.65342,0.65272,0.65202,0.65132,0.65062,0.64992,0.64922,0.64853,0.64783,0.64713,0.64643,0.64574,0.64504,0.64434,0.64365,0.64295,0.64225,0.64156,
-0.64086,0.64017,0.63947,0.63878,0.63808,0.63739,0.6367,0.636,0.63531,0.63462,0.63393,0.63323,0.63254,0.63185,0.63116,0.63047,0.62978,0.62909,0.6284,0.62771,0.62702,0.62633,0.62564,0.62495,0.62427,0.62358,0.62289,0.6222,0.62152,0.62083,0.62014,0.61946,0.61877,0.61809,0.6174,0.61672,0.61604,0.61535,0.61467,0.61399,0.6133,0.61262,0.61194,0.61126,0.61058,0.6099,0.60922,0.60854,0.60786,0.60718,0.6065,0.60582,0.60514,0.60446,0.60379,0.60311,0.60243,0.60176,0.60108,0.60041,0.59973,0.59906,0.59838,0.59771,0.59703,0.59636,0.59569,0.59501,0.59434,0.59367,0.593,0.59233,0.59166,0.59099,0.59032,0.58965,0.58898,0.58831,0.58764,0.58698,0.58631,0.58564,0.58498,0.58431,0.58365,0.58298,0.58232,0.58165,0.58099,0.58032,0.57966,0.579,0.57834,0.57767,0.57701,0.57635,0.57569,0.57503,0.57437,0.57371,
-0.57305,0.5724,0.57174,0.57108,0.57042,0.56977,0.56911,0.56845,0.5678,0.56714,0.56649,0.56584,0.56518,0.56453,0.56388,0.56322,0.56257,0.56192,0.56127,0.56062,0.55997,0.55932,0.55867,0.55802,0.55738,0.55673,0.55608,0.55543,0.55479,0.55414,0.5535,0.55285,0.55221,0.55156,0.55092,0.55028,0.54963,0.54899,0.54835,0.54771,0.54707,0.54643,0.54579,0.54515,0.54451,0.54387,0.54323,0.5426,0.54196,0.54132,0.54069,0.54005,0.53942,0.53878,0.53815,0.53751,0.53688,0.53625,0.53562,0.53498,0.53435,0.53372,0.53309,0.53246,0.53183,0.5312,0.53058,0.52995,0.52932,0.52869,0.52807,0.52744,0.52682,0.52619,0.52557,0.52494,0.52432,0.5237,0.52307,0.52245,0.52183,0.52121,0.52059,0.51997,0.51935,0.51873,0.51811,0.51749,0.51688,0.51626,0.51564,0.51503,0.51441,0.5138,0.51318,0.51257,0.51195,0.51134,0.51073,0.51012,
-0.5095,0.50889,0.50828,0.50767,0.50706,0.50645,0.50585,0.50524,0.50463,0.50402,0.50342,0.50281,0.50221,0.5016,0.501,0.50039,0.49979,0.49919,0.49858,0.49798,0.49738,0.49678,0.49618,0.49558,0.49498,0.49438,0.49378,0.49318,0.49259,0.49199,0.49139,0.4908,0.4902,0.48961,0.48901,0.48842,0.48783,0.48724,0.48664,0.48605,0.48546,0.48487,0.48428,0.48369,0.4831,0.48251,0.48192,0.48134,0.48075,0.48016,0.47958,0.47899,0.47841,0.47782,0.47724,0.47666,0.47607,0.47549,0.47491,0.47433,0.47375,0.47317,0.47259,0.47201,0.47143,0.47085,0.47027,0.4697,0.46912,0.46854,0.46797,0.46739,0.46682,0.46625,0.46567,0.4651,0.46453,0.46396,0.46338,0.46281,0.46224,0.46167,0.4611,0.46054,0.45997,0.4594,0.45883,0.45827,0.4577,0.45713,0.45657,0.45601,0.45544,0.45488,0.45432,0.45375,0.45319,0.45263,0.45207,0.45151,
-0.45095,0.45039,0.44983,0.44927,0.44872,0.44816,0.4476,0.44705,0.44649,0.44594,0.44538,0.44483,0.44427,0.44372,0.44317,0.44262,0.44207,0.44151,0.44096,0.44041,0.43987,0.43932,0.43877,0.43822,0.43767,0.43713,0.43658,0.43604,0.43549,0.43495,0.4344,0.43386,0.43332,0.43277,0.43223,0.43169,0.43115,0.43061,0.43007,0.42953,0.42899,0.42846,0.42792,0.42738,0.42684,0.42631,0.42577,0.42524,0.4247,0.42417,0.42364,0.4231,0.42257,0.42204,0.42151,0.42098,0.42045,0.41992,0.41939,0.41886,0.41833,0.4178,0.41728,0.41675,0.41623,0.4157,0.41517,0.41465,0.41413,0.4136,0.41308,0.41256,0.41204,0.41152,0.41099,0.41047,0.40996,0.40944,0.40892,0.4084,0.40788,0.40736,0.40685,0.40633,0.40582,0.4053,0.40479,0.40427,0.40376,0.40325,0.40274,0.40222,0.40171,0.4012,0.40069,0.40018,0.39967,0.39916,0.39866,0.39815,
-0.39764,0.39714,0.39663,0.39612,0.39562,0.39511,0.39461,0.39411,0.3936,0.3931,0.3926,0.3921,0.3916,0.3911,0.3906,0.3901,0.3896,0.3891,0.3886,0.38811,0.38761,0.38711,0.38662,0.38612,0.38563,0.38514,0.38464,0.38415,0.38366,0.38316,0.38267,0.38218,0.38169,0.3812,0.38071,0.38022,0.37974,0.37925,0.37876,0.37827,0.37779,0.3773,0.37682,0.37633,0.37585,0.37536,0.37488,0.3744,0.37392,0.37343,0.37295,0.37247,0.37199,0.37151,0.37103,0.37056,0.37008,0.3696,0.36912,0.36865,0.36817,0.3677,0.36722,0.36675,0.36627,0.3658,0.36533,0.36485,0.36438,0.36391,0.36344,0.36297,0.3625,0.36203,0.36156,0.36109,0.36062,0.36016,0.35969,0.35922,0.35876,0.35829,0.35783,0.35736,0.3569,0.35644,0.35597,0.35551,0.35505,0.35459,0.35413,0.35367,0.35321,0.35275,0.35229,0.35183,0.35137,0.35092,0.35046,0.35,
-0.34955,0.34909,0.34864,0.34818,0.34773,0.34728,0.34682,0.34637,0.34592,0.34547,0.34502,0.34457,0.34412,0.34367,0.34322,0.34277,0.34233,0.34188,0.34143,0.34099,0.34054,0.34009,0.33965,0.33921,0.33876,0.33832,0.33788,0.33743,0.33699,0.33655,0.33611,0.33567,0.33523,0.33479,0.33435,0.33391,0.33348,0.33304,0.3326,0.33216,0.33173,0.33129,0.33086,0.33042,0.32999,0.32956,0.32912,0.32869,0.32826,0.32783,0.3274,0.32697,0.32654,0.32611,0.32568,0.32525,0.32482,0.32439,0.32397,0.32354,0.32311,0.32269,0.32226,0.32184,0.32141,0.32099,0.32057,0.32014,0.31972,0.3193,0.31888,0.31846,0.31804,0.31762,0.3172,0.31678,0.31636,0.31594,0.31553,0.31511,0.31469,0.31428,0.31386,0.31345,0.31303,0.31262,0.3122,0.31179,0.31138,0.31097,0.31055,0.31014,0.30973,0.30932,0.30891,0.3085,0.30809,0.30768,0.30728,0.30687,
-0.30646,0.30606,0.30565,0.30524,0.30484,0.30443,0.30403,0.30363,0.30322,0.30282,0.30242,0.30202,0.30161,0.30121,0.30081,0.30041,0.30001,0.29961,0.29922,0.29882,0.29842,0.29802,0.29763,0.29723,0.29683,0.29644,0.29604,0.29565,0.29526,0.29486,0.29447,0.29408,0.29368,0.29329,0.2929,0.29251,0.29212,0.29173,0.29134,0.29095,0.29056,0.29018,0.28979,0.2894,0.28901,0.28863,0.28824,0.28786,0.28747,0.28709,0.2867,0.28632,0.28594,0.28555,0.28517,0.28479,0.28441,0.28403,0.28365,0.28327,0.28289,0.28251,0.28213,0.28175,0.28138,0.281,0.28062,0.28025,0.27987,0.27949,0.27912,0.27875,0.27837,0.278,0.27762,0.27725,0.27688,0.27651,0.27614,0.27577,0.27539,0.27502,0.27466,0.27429,0.27392,0.27355,0.27318,0.27281,0.27245,0.27208,0.27171,0.27135,0.27098,0.27062,0.27025,0.26989,0.26953,0.26916,0.2688,0.26844,
-0.26808,0.26772,0.26735,0.26699,0.26663,0.26627,0.26592,0.26556,0.2652,0.26484,0.26448,0.26413,0.26377,0.26341,0.26306,0.2627,0.26235,0.26199,0.26164,0.26129,0.26093,0.26058,0.26023,0.25988,0.25953,0.25917,0.25882,0.25847,0.25812,0.25777,0.25743,0.25708,0.25673,0.25638,0.25603,0.25569,0.25534,0.255,0.25465,0.2543,0.25396,0.25362,0.25327,0.25293,0.25259,0.25224,0.2519,0.25156,0.25122,0.25088,0.25054,0.2502,0.24986,0.24952,0.24918,0.24884,0.2485,0.24817,0.24783,0.24749,0.24715,0.24682,0.24648,0.24615,0.24581,0.24548,0.24515,0.24481,0.24448,0.24415,0.24381,0.24348,0.24315,0.24282,0.24249,0.24216,0.24183,0.2415,0.24117,0.24084,0.24051,0.24019,0.23986,0.23953,0.23921,0.23888,0.23855,0.23823,0.2379,0.23758,0.23726,0.23693,0.23661,0.23629,0.23596,0.23564,0.23532,0.235,0.23468,0.23436,
-0.23404,0.23372,0.2334,0.23308,0.23276,0.23244,0.23212,0.23181,0.23149,0.23117,0.23086,0.23054,0.23022,0.22991,0.2296,0.22928,0.22897,0.22865,0.22834,0.22803,0.22772,0.2274,0.22709,0.22678,0.22647,0.22616,0.22585,0.22554,0.22523,0.22492,0.22461,0.22431,0.224,0.22369,0.22338,0.22308,0.22277,0.22247,0.22216,0.22186,0.22155,0.22125,0.22094,0.22064,0.22034,0.22003,0.21973,0.21943,0.21913,0.21883,0.21853,0.21823,0.21793,0.21763,0.21733,0.21703,0.21673,0.21643,0.21613,0.21584,0.21554,0.21524,0.21495,0.21465,0.21436,0.21406,0.21377,0.21347,0.21318,0.21288,0.21259,0.2123,0.212,0.21171,0.21142,0.21113,0.21084,0.21055,0.21026,0.20997,0.20968,0.20939,0.2091,0.20881,0.20852,0.20823,0.20795,0.20766,0.20737,0.20709,0.2068,0.20652,0.20623,0.20595,0.20566,0.20538,0.20509,0.20481,0.20453,0.20424,
-0.20396,0.20368,0.2034,0.20312,0.20284,0.20256,0.20228,0.202,0.20172,0.20144,0.20116,0.20088,0.2006,0.20032,0.20005,0.19977,0.19949,0.19922,0.19894,0.19866,0.19839,0.19811,0.19784,0.19757,0.19729,0.19702,0.19675,0.19647,0.1962,0.19593,0.19566,0.19539,0.19511,0.19484,0.19457,0.1943,0.19403,0.19376,0.1935,0.19323,0.19296,0.19269,0.19242,0.19216,0.19189,0.19162,0.19136,0.19109,0.19082,0.19056,0.19029,0.19003,0.18977,0.1895,0.18924,0.18898,0.18871,0.18845,0.18819,0.18793,0.18766,0.1874,0.18714,0.18688,0.18662,0.18636,0.1861,0.18584,0.18558,0.18533,0.18507,0.18481,0.18455,0.1843,0.18404,0.18378,0.18353,0.18327,0.18302,0.18276,0.18251,0.18225,0.182,0.18174,0.18149,0.18124,0.18098,0.18073,0.18048,0.18023,0.17998,0.17972,0.17947,0.17922,0.17897,0.17872,0.17847,0.17822,0.17797,0.17773,
-0.17748,0.17723,0.17698,0.17674,0.17649,0.17624,0.176,0.17575,0.1755,0.17526,0.17501,0.17477,0.17452,0.17428,0.17404,0.17379,0.17355,0.17331,0.17306,0.17282,0.17258,0.17234,0.1721,0.17186,0.17162,0.17137,0.17113,0.1709,0.17066,0.17042,0.17018,0.16994,0.1697,0.16946,0.16923,0.16899,0.16875,0.16852,0.16828,0.16804,0.16781,0.16757,0.16734,0.1671,0.16687,0.16663,0.1664,0.16617,0.16593,0.1657,0.16547,0.16524,0.165,0.16477,0.16454,0.16431,0.16408,0.16385,0.16362,0.16339,0.16316,0.16293,0.1627,0.16247,0.16224,0.16202,0.16179,0.16156,0.16133,0.16111,0.16088,0.16065,0.16043,0.1602,0.15998,0.15975,0.15953,0.1593,0.15908,0.15885,0.15863,0.15841,0.15818,0.15796,0.15774,0.15752,0.1573,0.15707,0.15685,0.15663,0.15641,0.15619,0.15597,0.15575,0.15553,0.15531,0.15509,0.15487,0.15466,0.15444,
-0.15422,0.154,0.15379,0.15357,0.15335,0.15314,0.15292,0.1527,0.15249,0.15227,0.15206,0.15184,0.15163,0.15142,0.1512,0.15099,0.15077,0.15056,0.15035,0.15014,0.14992,0.14971,0.1495,0.14929,0.14908,0.14887,0.14866,0.14845,0.14824,0.14803,0.14782,0.14761,0.1474,0.14719,0.14699,0.14678,0.14657,0.14636,0.14616,0.14595,0.14574,0.14554,0.14533,0.14512,0.14492,0.14471,0.14451,0.1443,0.1441,0.1439,0.14369,0.14349,0.14329,0.14308,0.14288,0.14268,0.14248,0.14227,0.14207,0.14187,0.14167,0.14147,0.14127,0.14107,0.14087,0.14067,0.14047,0.14027,0.14007,0.13987,0.13967,0.13948,0.13928,0.13908,0.13888,0.13869,0.13849,0.13829,0.1381,0.1379,0.1377,0.13751,0.13731,0.13712,0.13692,0.13673,0.13654,0.13634,0.13615,0.13595,0.13576,0.13557,0.13538,0.13518,0.13499,0.1348,0.13461,0.13442,0.13423,0.13403,
-0.13384,0.13365,0.13346,0.13327,0.13308,0.1329,0.13271,0.13252,0.13233,0.13214,0.13195,0.13176,0.13158,0.13139,0.1312,0.13102,0.13083,0.13064,0.13046,0.13027,0.13009,0.1299,0.12972,0.12953,0.12935,0.12916,0.12898,0.12879,0.12861,0.12843,0.12824,0.12806,0.12788,0.1277,0.12752,0.12733,0.12715,0.12697,0.12679,0.12661,0.12643,0.12625,0.12607,0.12589,0.12571,0.12553,0.12535,0.12517,0.12499,0.12481,0.12464,0.12446,0.12428,0.1241,0.12393,0.12375,0.12357,0.1234,0.12322,0.12304,0.12287,0.12269,0.12252,0.12234,0.12217,0.12199,0.12182,0.12164,0.12147,0.1213,0.12112,0.12095,0.12078,0.1206,0.12043,0.12026,0.12009,0.11992,0.11974,0.11957,0.1194,0.11923,0.11906,0.11889,0.11872,0.11855,0.11838,0.11821,0.11804,0.11787,0.1177,0.11754,0.11737,0.1172,0.11703,0.11686,0.1167,0.11653,0.11636,0.1162,
-0.11603,0.11586,0.1157,0.11553,0.11537,0.1152,0.11504,0.11487,0.11471,0.11454,0.11438,0.11421,0.11405,0.11389,0.11372,0.11356,0.1134,0.11323,0.11307,0.11291,0.11275,0.11259,0.11242,0.11226,0.1121,0.11194,0.11178,0.11162,0.11146,0.1113,0.11114,0.11098,0.11082,0.11066,0.1105,0.11035,0.11019,0.11003,0.10987,0.10971,0.10955,0.1094,0.10924,0.10908,0.10893,0.10877,0.10861,0.10846,0.1083,0.10815,0.10799,0.10784,0.10768,0.10753,0.10737,0.10722,0.10706,0.10691,0.10675,0.1066,0.10645,0.10629,0.10614,0.10599,0.10584,0.10568,0.10553,0.10538,0.10523,0.10508,0.10493,0.10478,0.10462,0.10447,0.10432,0.10417,0.10402,0.10387,0.10372,0.10357,0.10342,0.10328,0.10313,0.10298,0.10283,0.10268,0.10253,0.10239,0.10224,0.10209,0.10194,0.1018,0.10165,0.1015,0.10136,0.10121,0.10107,0.10092,0.10077,0.10063,
-0.10048,0.10034,0.10019,0.10005,0.09991,0.09976,0.09962,0.09947,0.09933,0.09919,0.09904,0.0989,0.09876,0.09862,0.09847,0.09833,0.09819,0.09805,0.09791,0.09776,0.09762,0.09748,0.09734,0.0972,0.09706,0.09692,0.09678,0.09664,0.0965,0.09636,0.09622,0.09608,0.09594,0.09581,0.09567,0.09553,0.09539,0.09525,0.09512,0.09498,0.09484,0.0947,0.09457,0.09443,0.09429,0.09416,0.09402,0.09388,0.09375,0.09361,0.09348,0.09334,0.09321,0.09307,0.09294,0.0928,0.09267,0.09253,0.0924,0.09227,0.09213,0.092,0.09187,0.09173,0.0916,0.09147,0.09134,0.0912,0.09107,0.09094,0.09081,0.09068,0.09055,0.09041,0.09028,0.09015,0.09002,0.08989,0.08976,0.08963,0.0895,0.08937,0.08924,0.08911,0.08898,0.08885,0.08873,0.0886,0.08847,0.08834,0.08821,0.08808,0.08796,0.08783,0.0877,0.08757,0.08745,0.08732,0.08719,0.08707,
-0.08694,0.08681,0.08669,0.08656,0.08644,0.08631,0.08619,0.08606,0.08594,0.08581,0.08569,0.08556,0.08544,0.08531,0.08519,0.08507,0.08494,0.08482,0.0847,0.08457,0.08445,0.08433,0.0842,0.08408,0.08396,0.08384,0.08372,0.08359,0.08347,0.08335,0.08323,0.08311,0.08299,0.08287,0.08275,0.08263,0.08251,0.08239,0.08227,0.08215,0.08203,0.08191,0.08179,0.08167,0.08155,0.08143,0.08132,0.0812,0.08108,0.08096,0.08084,0.08073,0.08061,0.08049,0.08037,0.08026,0.08014,0.08002,0.07991,0.07979,0.07967,0.07956,0.07944,0.07933,0.07921,0.0791,0.07898,0.07887,0.07875,0.07864,0.07852,0.07841,0.07829,0.07818,0.07806,0.07795,0.07784,0.07772,0.07761,0.0775,0.07738,0.07727,0.07716,0.07705,0.07693,0.07682,0.07671,0.0766,0.07649,0.07637,0.07626,0.07615,0.07604,0.07593,0.07582,0.07571,0.0756,0.07549,0.07538,0.07527,
-0.07516,0.07505,0.07494,0.07483,0.07472,0.07461,0.0745,0.07439,0.07428,0.07418,0.07407,0.07396,0.07385,0.07374,0.07364,0.07353,0.07342,0.07331,0.07321,0.0731,0.07299,0.07289,0.07278,0.07267,0.07257,0.07246,0.07236,0.07225,0.07214,0.07204,0.07193,0.07183,0.07172,0.07162,0.07151,0.07141,0.07131,0.0712,0.0711,0.07099,0.07089,0.07079,0.07068,0.07058,0.07048,0.07037,0.07027,0.07017,0.07006,0.06996,0.06986,0.06976,0.06965,0.06955,0.06945,0.06935,0.06925,0.06915,0.06905,0.06894,0.06884,0.06874,0.06864,0.06854,0.06844,0.06834,0.06824,0.06814,0.06804,0.06794,0.06784,0.06774,0.06764,0.06754,0.06745,0.06735,0.06725,0.06715,0.06705,0.06695,0.06685,0.06676,0.06666,0.06656,0.06646,0.06637,0.06627,0.06617,0.06607,0.06598,0.06588,0.06578,0.06569,0.06559,0.0655,0.0654,0.0653,0.06521,0.06511,0.06502,
-0.06492,0.06483,0.06473,0.06464,0.06454,0.06445,0.06435,0.06426,0.06416,0.06407,0.06397,0.06388,0.06379,0.06369,0.0636,0.06351,0.06341,0.06332,0.06323,0.06313,0.06304,0.06295,0.06286,0.06276,0.06267,0.06258,0.06249,0.0624,0.06231,0.06221,0.06212,0.06203,0.06194,0.06185,0.06176,0.06167,0.06158,0.06149,0.0614,0.06131,0.06122,0.06113,0.06104,0.06095,0.06086,0.06077,0.06068,0.06059,0.0605,0.06041,0.06032,0.06023,0.06014,0.06006,0.05997,0.05988,0.05979,0.0597,0.05962,0.05953,0.05944,0.05935,0.05927,0.05918,0.05909,0.059,0.05892,0.05883,0.05874,0.05866,0.05857,0.05849,0.0584,0.05831,0.05823,0.05814,0.05806,0.05797,0.05789,0.0578,0.05771,0.05763,0.05755,0.05746,0.05738,0.05729,0.05721,0.05712,0.05704,0.05695,0.05687,0.05679,0.0567,0.05662,0.05654,0.05645,0.05637,0.05629,0.0562,0.05612,
-0.05604,0.05596,0.05587,0.05579,0.05571,0.05563,0.05554,0.05546,0.05538,0.0553,0.05522,0.05514,0.05505,0.05497,0.05489,0.05481,0.05473,0.05465,0.05457,0.05449,0.05441,0.05433,0.05425,0.05417,0.05409,0.05401,0.05393,0.05385,0.05377,0.05369,0.05361,0.05353,0.05345,0.05337,0.0533,0.05322,0.05314,0.05306,0.05298,0.0529,0.05283,0.05275,0.05267,0.05259,0.05251,0.05244,0.05236,0.05228,0.0522,0.05213,0.05205,0.05197,0.0519,0.05182,0.05174,0.05167,0.05159,0.05151,0.05144,0.05136,0.05129,0.05121,0.05113,0.05106,0.05098,0.05091,0.05083,0.05076,0.05068,0.05061,0.05053,0.05046,0.05038,0.05031,0.05024,0.05016,0.05009,0.05001,0.04994,0.04986,0.04979,0.04972,0.04964,0.04957,0.0495,0.04942,0.04935,0.04928,0.0492,0.04913,0.04906,0.04899,0.04891,0.04884,0.04877,0.0487,0.04863,0.04855,0.04848,0.04841,
-0.04834,0.04827,0.0482,0.04812,0.04805,0.04798,0.04791,0.04784,0.04777,0.0477,0.04763,0.04756,0.04749,0.04742,0.04735,0.04728,0.04721,0.04714,0.04707,0.047,0.04693,0.04686,0.04679,0.04672,0.04665,0.04658,0.04651,0.04644,0.04637,0.0463,0.04624,0.04617,0.0461,0.04603,0.04596,0.04589,0.04583,0.04576,0.04569,0.04562,0.04555,0.04549,0.04542,0.04535,0.04529,0.04522,0.04515,0.04508,0.04502,0.04495,0.04488,0.04482,0.04475,0.04468,0.04462,0.04455,0.04449,0.04442,0.04435,0.04429,0.04422,0.04416,0.04409,0.04403,0.04396,0.0439,0.04383,0.04376,0.0437,0.04364,0.04357,0.04351,0.04344,0.04338,0.04331,0.04325,0.04318,0.04312,0.04306,0.04299,0.04293,0.04286,0.0428,0.04274,0.04267,0.04261,0.04255,0.04248,0.04242,0.04236,0.04229,0.04223,0.04217,0.04211,0.04204,0.04198,0.04192,0.04186,0.04179,0.04173,
-0.04167,0.04161,0.04155,0.04148,0.04142,0.04136,0.0413,0.04124,0.04118,0.04112,0.04105,0.04099,0.04093,0.04087,0.04081,0.04075,0.04069,0.04063,0.04057,0.04051,0.04045,0.04039,0.04033,0.04027,0.04021,0.04015,0.04009,0.04003,0.03997,0.03991,0.03985,0.03979,0.03973,0.03967,0.03961,0.03955,0.0395,0.03944,0.03938,0.03932,0.03926,0.0392,0.03914,0.03909,0.03903,0.03897,0.03891,0.03885,0.0388,0.03874,0.03868,0.03862,0.03857,0.03851,0.03845,0.03839,0.03834,0.03828,0.03822,0.03817,0.03811,0.03805,0.038,0.03794,0.03788,0.03783,0.03777,0.03771,0.03766,0.0376,0.03754,0.03749,0.03743,0.03738,0.03732,0.03727,0.03721,0.03715,0.0371,0.03704,0.03699,0.03693,0.03688,0.03682,0.03677,0.03671,0.03666,0.0366,0.03655,0.0365,0.03644,0.03639,0.03633,0.03628,0.03622,0.03617,0.03612,0.03606,0.03601,0.03595,
-0.0359,0.03585,0.03579,0.03574,0.03569,0.03563,0.03558,0.03553,0.03547,0.03542,0.03537,0.03532,0.03526,0.03521,0.03516,0.03511,0.03505,0.035,0.03495,0.0349,0.03484,0.03479,0.03474,0.03469,0.03464,0.03459,0.03453,0.03448,0.03443,0.03438,0.03433,0.03428,0.03423,0.03417,0.03412,0.03407,0.03402,0.03397,0.03392,0.03387,0.03382,0.03377,0.03372,0.03367,0.03362,0.03357,0.03352,0.03347,0.03342,0.03337,0.03332,0.03327,0.03322,0.03317,0.03312,0.03307,0.03302,0.03297,0.03292,0.03287,0.03282,0.03277,0.03272,0.03267,0.03263,0.03258,0.03253,0.03248,0.03243,0.03238,0.03233,0.03229,0.03224,0.03219,0.03214,0.03209,0.03205,0.032,0.03195,0.0319,0.03185,0.03181,0.03176,0.03171,0.03166,0.03162,0.03157,0.03152,0.03147,0.03143,0.03138,0.03133,0.03129,0.03124,0.03119,0.03115,0.0311,0.03105,0.03101,0.03096,
-0.03091,0.03087,0.03082,0.03078,0.03073,0.03068,0.03064,0.03059,0.03055,0.0305,0.03045,0.03041,0.03036,0.03032,0.03027,0.03023,0.03018,0.03014,0.03009,0.03005,0.03,0.02996,0.02991,0.02987,0.02982,0.02978,0.02973,0.02969,0.02964,0.0296,0.02955,0.02951,0.02947,0.02942,0.02938,0.02933,0.02929,0.02925,0.0292,0.02916,0.02911,0.02907,0.02903,0.02898,0.02894,0.0289,0.02885,0.02881,0.02877,0.02872,0.02868,0.02864,0.02859,0.02855,0.02851,0.02847,0.02842,0.02838,0.02834,0.0283,0.02825,0.02821,0.02817,0.02813,0.02808,0.02804,0.028,0.02796,0.02792,0.02787,0.02783,0.02779,0.02775,0.02771,0.02767,0.02762,0.02758,0.02754,0.0275,0.02746,0.02742,0.02738,0.02734,0.02729,0.02725,0.02721,0.02717,0.02713,0.02709,0.02705,0.02701,0.02697,0.02693,0.02689,0.02685,0.02681,0.02677,0.02673,0.02669,0.02665,
-0.02661,0.02657,0.02653,0.02649,0.02645,0.02641,0.02637,0.02633,0.02629,0.02625,0.02621,0.02617,0.02613,0.02609,0.02605,0.02601,0.02597,0.02593,0.0259,0.02586,0.02582,0.02578,0.02574,0.0257,0.02566,0.02562,0.02559,0.02555,0.02551,0.02547,0.02543,0.02539,0.02536,0.02532,0.02528,0.02524,0.0252,0.02517,0.02513,0.02509,0.02505,0.02501,0.02498,0.02494,0.0249,0.02486,0.02483,0.02479,0.02475,0.02472,0.02468,0.02464,0.0246,0.02457,0.02453,0.02449,0.02446,0.02442,0.02438,0.02435,0.02431,0.02427,0.02424,0.0242,0.02416,0.02413,0.02409,0.02405,0.02402,0.02398,0.02395,0.02391,0.02387,0.02384,0.0238,0.02377,0.02373,0.02369,0.02366,0.02362,0.02359,0.02355,0.02352,0.02348,0.02345,0.02341,0.02338,0.02334,0.02331,0.02327,0.02323,0.0232,0.02316,0.02313,0.0231,0.02306,0.02303,0.02299,0.02296,0.02292,
-0.02289,0.02285,0.02282,0.02278,0.02275,0.02272,0.02268,0.02265,0.02261,0.02258,0.02254,0.02251,0.02248,0.02244,0.02241,0.02238,0.02234,0.02231,0.02227,0.02224,0.02221,0.02217,0.02214,0.02211,0.02207,0.02204,0.02201,0.02197,0.02194,0.02191,0.02187,0.02184,0.02181,0.02178,0.02174,0.02171,0.02168,0.02164,0.02161,0.02158,0.02155,0.02151,0.02148,0.02145,0.02142,0.02138,0.02135,0.02132,0.02129,0.02126,0.02122,0.02119,0.02116,0.02113,0.0211,0.02106,0.02103,0.021,0.02097,0.02094,0.02091,0.02087,0.02084,0.02081,0.02078,0.02075,0.02072,0.02069,0.02065,0.02062,0.02059,0.02056,0.02053,0.0205,0.02047,0.02044,0.02041,0.02038,0.02035,0.02031,0.02028,0.02025,0.02022,0.02019,0.02016,0.02013,0.0201,0.02007,0.02004,0.02001,0.01998,0.01995,0.01992,0.01989,0.01986,0.01983,0.0198,0.01977,0.01974,0.01971,
-0.01968,0.01965,0.01962,0.01959,0.01956,0.01953,0.0195,0.01947,0.01944,0.01941,0.01938,0.01935,0.01933,0.0193,0.01927,0.01924,0.01921,0.01918,0.01915,0.01912,0.01909,0.01906,0.01904,0.01901,0.01898,0.01895,0.01892,0.01889,0.01886,0.01883,0.01881,0.01878,0.01875,0.01872,0.01869,0.01866,0.01864,0.01861,0.01858,0.01855,0.01852,0.0185,0.01847,0.01844,0.01841,0.01838,0.01836,0.01833,0.0183,0.01827,0.01825,0.01822,0.01819,0.01816,0.01814,0.01811,0.01808,0.01805,0.01803,0.018,0.01797,0.01794,0.01792,0.01789,0.01786,0.01784,0.01781,0.01778,0.01775,0.01773,0.0177,0.01767,0.01765,0.01762,0.01759,0.01757,0.01754,0.01751,0.01749,0.01746,0.01744,0.01741,0.01738,0.01736,0.01733,0.0173,0.01728,0.01725,0.01722,0.0172,0.01717,0.01715,0.01712,0.01709,0.01707,0.01704,0.01702,0.01699,0.01697,0.01694,
-0.01691,0.01689,0.01686,0.01684,0.01681,0.01679,0.01676,0.01674,0.01671,0.01669,0.01666,0.01663,0.01661,0.01658,0.01656,0.01653,0.01651,0.01648,0.01646,0.01643,0.01641,0.01638,0.01636,0.01633,0.01631,0.01629,0.01626,0.01624,0.01621,0.01619,0.01616,0.01614,0.01611,0.01609,0.01606,0.01604,0.01602,0.01599,0.01597,0.01594,0.01592,0.01589,0.01587,0.01585,0.01582,0.0158,0.01577,0.01575,0.01573,0.0157,0.01568,0.01565,0.01563,0.01561,0.01558,0.01556,0.01554,0.01551,0.01549,0.01547,0.01544,0.01542,0.0154,0.01537,0.01535,0.01533,0.0153,0.01528,0.01526,0.01523,0.01521,0.01519,0.01516,0.01514,0.01512,0.01509,0.01507,0.01505,0.01503,0.015,0.01498,0.01496,0.01493,0.01491,0.01489,0.01487,0.01484,0.01482,0.0148,0.01478,0.01475,0.01473,0.01471,0.01469,0.01467,0.01464,0.01462,0.0146,0.01458,0.01455,
-0.01453,0.01451,0.01449,0.01447,0.01444,0.01442,0.0144,0.01438,0.01436,0.01433,0.01431,0.01429,0.01427,0.01425,0.01423,0.0142,0.01418,0.01416,0.01414,0.01412,0.0141,0.01408,0.01405,0.01403,0.01401,0.01399,0.01397,0.01395,0.01393,0.01391,0.01388,0.01386,0.01384,0.01382,0.0138,0.01378,0.01376,0.01374,0.01372,0.0137,0.01367,0.01365,0.01363,0.01361,0.01359,0.01357,0.01355,0.01353,0.01351,0.01349,0.01347,0.01345,0.01343,0.01341,0.01339,0.01337,0.01335,0.01333,0.0133,0.01328,0.01326,0.01324,0.01322,0.0132,0.01318,0.01316,0.01314,0.01312,0.0131,0.01308,0.01306,0.01304,0.01302,0.013,0.01298,0.01296,0.01295,0.01293,0.01291,0.01289,0.01287,0.01285,0.01283,0.01281,0.01279,0.01277,0.01275,0.01273,0.01271,0.01269,0.01267,0.01265,0.01263,0.01261,0.01259,0.01258,0.01256,0.01254,0.01252,0.0125,
-0.01248,0.01246,0.01244,0.01242,0.0124,0.01239,0.01237,0.01235,0.01233,0.01231,0.01229,0.01227,0.01225,0.01224,0.01222,0.0122,0.01218,0.01216,0.01214,0.01212,0.01211,0.01209,0.01207,0.01205,0.01203,0.01201,0.012,0.01198,0.01196,0.01194,0.01192,0.0119,0.01189,0.01187,0.01185,0.01183,0.01181,0.0118,0.01178,0.01176,0.01174,0.01172,0.01171,0.01169,0.01167,0.01165,0.01164,0.01162,0.0116,0.01158,0.01156,0.01155,0.01153,0.01151,0.01149,0.01148,0.01146,0.01144,0.01142,0.01141,0.01139,0.01137,0.01135,0.01134,0.01132,0.0113,0.01129,0.01127,0.01125,0.01123,0.01122,0.0112,0.01118,0.01117,0.01115,0.01113,0.01111,0.0111,0.01108,0.01106,0.01105,0.01103,0.01101,0.011,0.01098,0.01096,0.01095,0.01093,0.01091,0.0109,0.01088,0.01086,0.01085,0.01083,0.01081,0.0108,0.01078,0.01076,0.01075,0.01073,
-0.01071,0.0107,0.01068,0.01067,0.01065,0.01063,0.01062,0.0106,0.01058,0.01057,0.01055,0.01054,0.01052,0.0105,0.01049,0.01047,0.01046,0.01044,0.01042,0.01041,0.01039,0.01038,0.01036,0.01034,0.01033,0.01031,0.0103,0.01028,0.01027,0.01025,0.01023,0.01022,0.0102,0.01019,0.01017,0.01016,0.01014,0.01013,0.01011,0.01009,0.01008,0.01006,0.01005,0.01003,0.01002,0.01,0.00999,0.00997,0.00996,0.00994,0.00993,0.00991,0.0099,0.00988,0.00987,0.00985,0.00984,0.00982,0.00981,0.00979,0.00978,0.00976,0.00975,0.00973,0.00972,0.0097,0.00969,0.00967,0.00966,0.00964,0.00963,0.00961,0.0096,0.00958,0.00957,0.00955,0.00954,0.00952,0.00951,0.0095,0.00948,0.00947,0.00945,0.00944,0.00942,0.00941,0.00939,0.00938,0.00937,0.00935,0.00934,0.00932,0.00931,0.00929,0.00928,0.00927,0.00925,0.00924,0.00922,0.00921,
-0.0092,0.00918,0.00917,0.00915,0.00914,0.00913,0.00911,0.0091,0.00908,0.00907,0.00906,0.00904,0.00903,0.00901,0.009,0.00899,0.00897,0.00896,0.00895,0.00893,0.00892,0.0089,0.00889,0.00888,0.00886,0.00885,0.00884,0.00882,0.00881,0.0088,0.00878,0.00877,0.00876,0.00874,0.00873,0.00872,0.0087,0.00869,0.00868,0.00866,0.00865,0.00864,0.00862,0.00861,0.0086,0.00858,0.00857,0.00856,0.00854,0.00853,0.00852,0.0085,0.00849,0.00848,0.00847,0.00845,0.00844,0.00843,0.00841,0.0084,0.00839,0.00838,0.00836,0.00835,0.00834,0.00832,0.00831,0.0083,0.00829,0.00827,0.00826,0.00825,0.00824,0.00822,0.00821,0.0082,0.00819,0.00817,0.00816,0.00815,0.00813,0.00812,0.00811,0.0081,0.00809,0.00807,0.00806,0.00805,0.00804,0.00802,0.00801,0.008,0.00799,0.00797,0.00796,0.00795,0.00794,0.00793,0.00791,0.0079,
-0.00789,0.00788,0.00787,0.00785,0.00784,0.00783,0.00782,0.00781,0.00779,0.00778,0.00777,0.00776,0.00775,0.00773,0.00772,0.00771,0.0077,0.00769,0.00767,0.00766,0.00765,0.00764,0.00763,0.00762,0.0076,0.00759,0.00758,0.00757,0.00756,0.00755,0.00753,0.00752,0.00751,0.0075,0.00749,0.00748,0.00747,0.00745,0.00744,0.00743,0.00742,0.00741,0.0074,0.00739,0.00737,0.00736,0.00735,0.00734,0.00733,0.00732,0.00731,0.0073,0.00728,0.00727,0.00726,0.00725,0.00724,0.00723,0.00722,0.00721,0.0072,0.00718,0.00717,0.00716,0.00715,0.00714,0.00713,0.00712,0.00711,0.0071,0.00709,0.00707,0.00706,0.00705,0.00704,0.00703,0.00702,0.00701,0.007,0.00699,0.00698,0.00697,0.00696,0.00695,0.00693,0.00692,0.00691,0.0069,0.00689,0.00688,0.00687,0.00686,0.00685,0.00684,0.00683,0.00682,0.00681,0.0068,0.00679,0.00678,
-0.00677,0.00676,0.00675,0.00674,0.00673,0.00671,0.0067,0.00669,0.00668,0.00667,0.00666,0.00665,0.00664,0.00663,0.00662,0.00661,0.0066,0.00659,0.00658,0.00657,0.00656,0.00655,0.00654,0.00653,0.00652,0.00651,0.0065,0.00649,0.00648,0.00647,0.00646,0.00645,0.00644,0.00643,0.00642,0.00641,0.0064,0.00639,0.00638,0.00637,0.00636,0.00635,0.00634,0.00633,0.00632,0.00631,0.0063,0.00629,0.00629,0.00628,0.00627,0.00626,0.00625,0.00624,0.00623,0.00622,0.00621,0.0062,0.00619,0.00618,0.00617,0.00616,0.00615,0.00614,0.00613,0.00612,0.00611,0.0061,0.00609,0.00609,0.00608,0.00607,0.00606,0.00605,0.00604,0.00603,0.00602,0.00601,0.006,0.00599,0.00598,0.00597,0.00596,0.00596,0.00595,0.00594,0.00593,0.00592,0.00591,0.0059,0.00589,0.00588,0.00587,0.00586,0.00586,0.00585,0.00584,0.00583,0.00582,0.00581,
-0.0058,0.00579,0.00578,0.00578,0.00577,0.00576,0.00575,0.00574,0.00573,0.00572,0.00571,0.0057,0.0057,0.00569,0.00568,0.00567,0.00566,0.00565,0.00564,0.00563,0.00563,0.00562,0.00561,0.0056,0.00559,0.00558,0.00557,0.00557,0.00556,0.00555,0.00554,0.00553,0.00552,0.00551,0.00551,0.0055,0.00549,0.00548,0.00547,0.00546,0.00546,0.00545,0.00544,0.00543,0.00542,0.00541,0.00541,0.0054,0.00539,0.00538,0.00537,0.00536,0.00536,0.00535,0.00534,0.00533,0.00532,0.00531,0.00531,0.0053,0.00529,0.00528,0.00527,0.00527,0.00526,0.00525,0.00524,0.00523,0.00522,0.00522,0.00521,0.0052,0.00519,0.00518,0.00518,0.00517,0.00516,0.00515,0.00515,0.00514,0.00513,0.00512,0.00511,0.00511,0.0051,0.00509,0.00508,0.00507,0.00507,0.00506,0.00505,0.00504,0.00504,0.00503,0.00502,0.00501,0.005,0.005,0.00499,0.00498,
-0.00497,0.00497,0.00496,0.00495,0.00494,0.00494,0.00493,0.00492,0.00491,0.0049,0.0049,0.00489,0.00488,0.00487,0.00487,0.00486,0.00485,0.00484,0.00484,0.00483,0.00482,0.00481,0.00481,0.0048,0.00479,0.00479,0.00478,0.00477,0.00476,0.00476,0.00475,0.00474,0.00473,0.00473,0.00472,0.00471,0.0047,0.0047,0.00469,0.00468,0.00468,0.00467,0.00466,0.00465,0.00465,0.00464,0.00463,0.00463,0.00462,0.00461,0.0046,0.0046,0.00459,0.00458,0.00458,0.00457,0.00456,0.00455,0.00455,0.00454,0.00453,0.00453,0.00452,0.00451,0.00451,0.0045,0.00449,0.00448,0.00448,0.00447,0.00446,0.00446,0.00445,0.00444,0.00444,0.00443,0.00442,0.00442,0.00441,0.0044,0.0044,0.00439,0.00438,0.00438,0.00437,0.00436,0.00436,0.00435,0.00434,0.00434,0.00433,0.00432,0.00432,0.00431,0.0043,0.0043,0.00429,0.00428,0.00428,0.00427,
-0.00426,0.00426,0.00425,0.00424,0.00424,0.00423,0.00422,0.00422,0.00421,0.0042,0.0042,0.00419,0.00418,0.00418,0.00417,0.00416,0.00416,0.00415,0.00415,0.00414,0.00413,0.00413,0.00412,0.00411,0.00411,0.0041,0.00409,0.00409,0.00408,0.00408,0.00407,0.00406,0.00406,0.00405,0.00404,0.00404,0.00403,0.00403,0.00402,0.00401,0.00401,0.004,0.00399,0.00399,0.00398,0.00398,0.00397,0.00396,0.00396,0.00395,0.00395,0.00394,0.00393,0.00393,0.00392,0.00391,0.00391,0.0039,0.0039,0.00389,0.00388,0.00388,0.00387,0.00387,0.00386,0.00385,0.00385,0.00384,0.00384,0.00383,0.00383,0.00382,0.00381,0.00381,0.0038,0.0038,0.00379,0.00378,0.00378,0.00377,0.00377,0.00376,0.00375,0.00375,0.00374,0.00374,0.00373,0.00373,0.00372,0.00371,0.00371,0.0037,0.0037,0.00369,0.00369,0.00368,0.00367,0.00367,0.00366,0.00366};
-
-constexpr double stored_lower_incomplete_gamma_values_n4[] = {0.0,0.0,0.0,0.0,0.0,0.0,0.0,1e-05,1e-05,1e-05,1e-05,2e-05,2e-05,3e-05,3e-05,4e-05,4e-05,5e-05,6e-05,7e-05,8e-05,9e-05,0.0001,0.00011,0.00012,0.00014,0.00015,0.00016,0.00018,0.0002,0.00021,0.00023,0.00025,0.00027,0.00029,0.00031,0.00033,0.00036,0.00038,0.00041,0.00043,0.00046,0.00049,0.00051,0.00054,0.00058,0.00061,0.00064,0.00067,0.00071,0.00074,0.00078,0.00082,0.00086,0.0009,0.00094,0.00098,0.00102,0.00107,0.00111,0.00116,0.00121,0.00126,0.00131,0.00136,0.00141,0.00146,0.00152,0.00157,0.00163,0.00169,0.00175,0.00181,0.00187,0.00193,0.00199,0.00206,0.00212,0.00219,0.00226,0.00233,0.0024,0.00247,0.00254,0.00262,0.00269,0.00277,0.00285,0.00293,0.00301,0.00309,0.00317,0.00325,0.00334,0.00343,0.00351,0.0036,0.00369,0.00379,0.00388,
-0.00397,0.00407,0.00416,0.00426,0.00436,0.00446,0.00456,0.00467,0.00477,0.00488,0.00498,0.00509,0.0052,0.00531,0.00542,0.00554,0.00565,0.00577,0.00588,0.006,0.00612,0.00624,0.00636,0.00649,0.00661,0.00674,0.00687,0.007,0.00713,0.00726,0.00739,0.00752,0.00766,0.0078,0.00793,0.00807,0.00821,0.00836,0.0085,0.00864,0.00879,0.00894,0.00909,0.00924,0.00939,0.00954,0.0097,0.00985,0.01001,0.01017,0.01032,0.01049,0.01065,0.01081,0.01098,0.01114,0.01131,0.01148,0.01165,0.01182,0.01199,0.01217,0.01234,0.01252,0.0127,0.01288,0.01306,0.01324,0.01342,0.01361,0.01379,0.01398,0.01417,0.01436,0.01455,0.01474,0.01494,0.01513,0.01533,0.01553,0.01573,0.01593,0.01613,0.01633,0.01654,0.01675,0.01695,0.01716,0.01737,0.01758,0.0178,0.01801,0.01823,0.01844,0.01866,0.01888,0.0191,0.01932,0.01955,0.01977,
-0.02,0.02023,0.02046,0.02069,0.02092,0.02115,0.02138,0.02162,0.02186,0.02209,0.02233,0.02257,0.02282,0.02306,0.0233,0.02355,0.0238,0.02405,0.0243,0.02455,0.0248,0.02505,0.02531,0.02556,0.02582,0.02608,0.02634,0.0266,0.02687,0.02713,0.0274,0.02766,0.02793,0.0282,0.02847,0.02874,0.02902,0.02929,0.02957,0.02984,0.03012,0.0304,0.03068,0.03097,0.03125,0.03153,0.03182,0.03211,0.0324,0.03269,0.03298,0.03327,0.03356,0.03386,0.03415,0.03445,0.03475,0.03505,0.03535,0.03565,0.03596,0.03626,0.03657,0.03688,0.03719,0.0375,0.03781,0.03812,0.03843,0.03875,0.03906,0.03938,0.0397,0.04002,0.04034,0.04066,0.04099,0.04131,0.04164,0.04197,0.04229,0.04262,0.04296,0.04329,0.04362,0.04396,0.04429,0.04463,0.04497,0.04531,0.04565,0.04599,0.04633,0.04668,0.04702,0.04737,0.04772,0.04806,0.04841,0.04877,
-0.04912,0.04947,0.04983,0.05018,0.05054,0.0509,0.05126,0.05162,0.05198,0.05234,0.05271,0.05307,0.05344,0.05381,0.05418,0.05455,0.05492,0.05529,0.05566,0.05604,0.05641,0.05679,0.05717,0.05755,0.05793,0.05831,0.05869,0.05908,0.05946,0.05985,0.06023,0.06062,0.06101,0.0614,0.06179,0.06219,0.06258,0.06297,0.06337,0.06377,0.06417,0.06456,0.06496,0.06537,0.06577,0.06617,0.06658,0.06698,0.06739,0.0678,0.06821,0.06862,0.06903,0.06944,0.06985,0.07027,0.07068,0.0711,0.07152,0.07194,0.07236,0.07278,0.0732,0.07362,0.07405,0.07447,0.0749,0.07532,0.07575,0.07618,0.07661,0.07704,0.07747,0.07791,0.07834,0.07878,0.07921,0.07965,0.08009,0.08053,0.08097,0.08141,0.08185,0.0823,0.08274,0.08319,0.08363,0.08408,0.08453,0.08498,0.08543,0.08588,0.08633,0.08679,0.08724,0.08769,0.08815,0.08861,0.08907,0.08953,
-0.08999,0.09045,0.09091,0.09137,0.09184,0.0923,0.09277,0.09323,0.0937,0.09417,0.09464,0.09511,0.09558,0.09605,0.09653,0.097,0.09748,0.09795,0.09843,0.09891,0.09939,0.09987,0.10035,0.10083,0.10131,0.1018,0.10228,0.10277,0.10325,0.10374,0.10423,0.10472,0.10521,0.1057,0.10619,0.10668,0.10718,0.10767,0.10817,0.10866,0.10916,0.10966,0.11015,0.11065,0.11115,0.11166,0.11216,0.11266,0.11316,0.11367,0.11418,0.11468,0.11519,0.1157,0.11621,0.11672,0.11723,0.11774,0.11825,0.11876,0.11928,0.11979,0.12031,0.12082,0.12134,0.12186,0.12238,0.1229,0.12342,0.12394,0.12446,0.12498,0.12551,0.12603,0.12656,0.12708,0.12761,0.12814,0.12867,0.1292,0.12973,0.13026,0.13079,0.13132,0.13185,0.13239,0.13292,0.13346,0.13399,0.13453,0.13507,0.13561,0.13614,0.13668,0.13723,0.13777,0.13831,0.13885,0.1394,0.13994,
-0.14049,0.14103,0.14158,0.14212,0.14267,0.14322,0.14377,0.14432,0.14487,0.14542,0.14598,0.14653,0.14708,0.14764,0.14819,0.14875,0.14931,0.14986,0.15042,0.15098,0.15154,0.1521,0.15266,0.15322,0.15378,0.15435,0.15491,0.15547,0.15604,0.1566,0.15717,0.15774,0.1583,0.15887,0.15944,0.16001,0.16058,0.16115,0.16172,0.16229,0.16287,0.16344,0.16401,0.16459,0.16516,0.16574,0.16632,0.16689,0.16747,0.16805,0.16863,0.16921,0.16979,0.17037,0.17095,0.17153,0.17211,0.1727,0.17328,0.17387,0.17445,0.17504,0.17562,0.17621,0.1768,0.17739,0.17797,0.17856,0.17915,0.17974,0.18033,0.18093,0.18152,0.18211,0.1827,0.1833,0.18389,0.18449,0.18508,0.18568,0.18628,0.18687,0.18747,0.18807,0.18867,0.18927,0.18987,0.19047,0.19107,0.19167,0.19227,0.19287,0.19348,0.19408,0.19469,0.19529,0.1959,0.1965,0.19711,0.19771,
-0.19832,0.19893,0.19954,0.20015,0.20076,0.20137,0.20198,0.20259,0.2032,0.20381,0.20442,0.20504,0.20565,0.20626,0.20688,0.20749,0.20811,0.20872,0.20934,0.20996,0.21057,0.21119,0.21181,0.21243,0.21305,0.21367,0.21429,0.21491,0.21553,0.21615,0.21677,0.21739,0.21802,0.21864,0.21926,0.21989,0.22051,0.22114,0.22176,0.22239,0.22301,0.22364,0.22427,0.22489,0.22552,0.22615,0.22678,0.22741,0.22804,0.22867,0.2293,0.22993,0.23056,0.23119,0.23182,0.23246,0.23309,0.23372,0.23436,0.23499,0.23563,0.23626,0.2369,0.23753,0.23817,0.2388,0.23944,0.24008,0.24071,0.24135,0.24199,0.24263,0.24327,0.24391,0.24455,0.24519,0.24583,0.24647,0.24711,0.24775,0.24839,0.24904,0.24968,0.25032,0.25096,0.25161,0.25225,0.2529,0.25354,0.25419,0.25483,0.25548,0.25612,0.25677,0.25742,0.25806,0.25871,0.25936,0.26001,0.26065,
-0.2613,0.26195,0.2626,0.26325,0.2639,0.26455,0.2652,0.26585,0.2665,0.26715,0.26781,0.26846,0.26911,0.26976,0.27042,0.27107,0.27172,0.27238,0.27303,0.27368,0.27434,0.27499,0.27565,0.2763,0.27696,0.27762,0.27827,0.27893,0.27959,0.28024,0.2809,0.28156,0.28221,0.28287,0.28353,0.28419,0.28485,0.28551,0.28617,0.28683,0.28749,0.28815,0.28881,0.28947,0.29013,0.29079,0.29145,0.29211,0.29277,0.29344,0.2941,0.29476,0.29542,0.29609,0.29675,0.29741,0.29808,0.29874,0.2994,0.30007,0.30073,0.3014,0.30206,0.30273,0.30339,0.30406,0.30472,0.30539,0.30605,0.30672,0.30739,0.30805,0.30872,0.30939,0.31005,0.31072,0.31139,0.31206,0.31272,0.31339,0.31406,0.31473,0.3154,0.31607,0.31674,0.31741,0.31807,0.31874,0.31941,0.32008,0.32075,0.32142,0.32209,0.32276,0.32344,0.32411,0.32478,0.32545,0.32612,0.32679,
-0.32746,0.32813,0.32881,0.32948,0.33015,0.33082,0.33149,0.33217,0.33284,0.33351,0.33419,0.33486,0.33553,0.33621,0.33688,0.33755,0.33823,0.3389,0.33957,0.34025,0.34092,0.3416,0.34227,0.34295,0.34362,0.34429,0.34497,0.34564,0.34632,0.34699,0.34767,0.34835,0.34902,0.3497,0.35037,0.35105,0.35172,0.3524,0.35308,0.35375,0.35443,0.3551,0.35578,0.35646,0.35713,0.35781,0.35849,0.35916,0.35984,0.36052,0.3612,0.36187,0.36255,0.36323,0.3639,0.36458,0.36526,0.36594,0.36661,0.36729,0.36797,0.36865,0.36932,0.37,0.37068,0.37136,0.37204,0.37271,0.37339,0.37407,0.37475,0.37543,0.37611,0.37678,0.37746,0.37814,0.37882,0.3795,0.38018,0.38086,0.38153,0.38221,0.38289,0.38357,0.38425,0.38493,0.38561,0.38629,0.38696,0.38764,0.38832,0.389,0.38968,0.39036,0.39104,0.39172,0.3924,0.39308,0.39375,0.39443,
-0.39511,0.39579,0.39647,0.39715,0.39783,0.39851,0.39919,0.39987,0.40054,0.40122,0.4019,0.40258,0.40326,0.40394,0.40462,0.4053,0.40598,0.40666,0.40734,0.40801,0.40869,0.40937,0.41005,0.41073,0.41141,0.41209,0.41277,0.41345,0.41412,0.4148,0.41548,0.41616,0.41684,0.41752,0.4182,0.41887,0.41955,0.42023,0.42091,0.42159,0.42227,0.42295,0.42362,0.4243,0.42498,0.42566,0.42634,0.42701,0.42769,0.42837,0.42905,0.42973,0.4304,0.43108,0.43176,0.43244,0.43311,0.43379,0.43447,0.43515,0.43582,0.4365,0.43718,0.43786,0.43853,0.43921,0.43989,0.44056,0.44124,0.44192,0.44259,0.44327,0.44395,0.44462,0.4453,0.44598,0.44665,0.44733,0.448,0.44868,0.44936,0.45003,0.45071,0.45138,0.45206,0.45273,0.45341,0.45408,0.45476,0.45543,0.45611,0.45678,0.45746,0.45813,0.45881,0.45948,0.46016,0.46083,0.46151,0.46218,
-0.46285,0.46353,0.4642,0.46487,0.46555,0.46622,0.4669,0.46757,0.46824,0.46891,0.46959,0.47026,0.47093,0.47161,0.47228,0.47295,0.47362,0.47429,0.47497,0.47564,0.47631,0.47698,0.47765,0.47832,0.47899,0.47967,0.48034,0.48101,0.48168,0.48235,0.48302,0.48369,0.48436,0.48503,0.4857,0.48637,0.48704,0.48771,0.48838,0.48905,0.48971,0.49038,0.49105,0.49172,0.49239,0.49306,0.49373,0.49439,0.49506,0.49573,0.4964,0.49706,0.49773,0.4984,0.49907,0.49973,0.5004,0.50106,0.50173,0.5024,0.50306,0.50373,0.50439,0.50506,0.50573,0.50639,0.50706,0.50772,0.50838,0.50905,0.50971,0.51038,0.51104,0.51171,0.51237,0.51303,0.5137,0.51436,0.51502,0.51568,0.51635,0.51701,0.51767,0.51833,0.519,0.51966,0.52032,0.52098,0.52164,0.5223,0.52296,0.52362,0.52428,0.52494,0.5256,0.52626,0.52692,0.52758,0.52824,0.5289,
-0.52956,0.53022,0.53088,0.53154,0.53219,0.53285,0.53351,0.53417,0.53482,0.53548,0.53614,0.5368,0.53745,0.53811,0.53876,0.53942,0.54008,0.54073,0.54139,0.54204,0.5427,0.54335,0.54401,0.54466,0.54531,0.54597,0.54662,0.54727,0.54793,0.54858,0.54923,0.54989,0.55054,0.55119,0.55184,0.55249,0.55315,0.5538,0.55445,0.5551,0.55575,0.5564,0.55705,0.5577,0.55835,0.559,0.55965,0.5603,0.56095,0.56159,0.56224,0.56289,0.56354,0.56419,0.56483,0.56548,0.56613,0.56677,0.56742,0.56807,0.56871,0.56936,0.57,0.57065,0.5713,0.57194,0.57258,0.57323,0.57387,0.57452,0.57516,0.5758,0.57645,0.57709,0.57773,0.57838,0.57902,0.57966,0.5803,0.58094,0.58158,0.58222,0.58287,0.58351,0.58415,0.58479,0.58543,0.58607,0.5867,0.58734,0.58798,0.58862,0.58926,0.5899,0.59053,0.59117,0.59181,0.59245,0.59308,0.59372,
-0.59436,0.59499,0.59563,0.59626,0.5969,0.59753,0.59817,0.5988,0.59943,0.60007,0.6007,0.60134,0.60197,0.6026,0.60323,0.60387,0.6045,0.60513,0.60576,0.60639,0.60702,0.60765,0.60828,0.60891,0.60954,0.61017,0.6108,0.61143,0.61206,0.61269,0.61332,0.61394,0.61457,0.6152,0.61583,0.61645,0.61708,0.61771,0.61833,0.61896,0.61958,0.62021,0.62083,0.62146,0.62208,0.62271,0.62333,0.62395,0.62458,0.6252,0.62582,0.62644,0.62707,0.62769,0.62831,0.62893,0.62955,0.63017,0.63079,0.63141,0.63203,0.63265,0.63327,0.63389,0.63451,0.63513,0.63574,0.63636,0.63698,0.6376,0.63821,0.63883,0.63945,0.64006,0.64068,0.64129,0.64191,0.64252,0.64314,0.64375,0.64437,0.64498,0.64559,0.6462,0.64682,0.64743,0.64804,0.64865,0.64927,0.64988,0.65049,0.6511,0.65171,0.65232,0.65293,0.65354,0.65415,0.65475,0.65536,0.65597,
-0.65658,0.65719,0.65779,0.6584,0.65901,0.65961,0.66022,0.66083,0.66143,0.66204,0.66264,0.66325,0.66385,0.66445,0.66506,0.66566,0.66626,0.66687,0.66747,0.66807,0.66867,0.66927,0.66987,0.67047,0.67107,0.67167,0.67227,0.67287,0.67347,0.67407,0.67467,0.67527,0.67587,0.67646,0.67706,0.67766,0.67825,0.67885,0.67945,0.68004,0.68064,0.68123,0.68183,0.68242,0.68302,0.68361,0.6842,0.6848,0.68539,0.68598,0.68657,0.68717,0.68776,0.68835,0.68894,0.68953,0.69012,0.69071,0.6913,0.69189,0.69248,0.69307,0.69365,0.69424,0.69483,0.69542,0.696,0.69659,0.69718,0.69776,0.69835,0.69893,0.69952,0.7001,0.70069,0.70127,0.70185,0.70244,0.70302,0.7036,0.70419,0.70477,0.70535,0.70593,0.70651,0.70709,0.70767,0.70825,0.70883,0.70941,0.70999,0.71057,0.71115,0.71173,0.7123,0.71288,0.71346,0.71403,0.71461,0.71519,
-0.71576,0.71634,0.71691,0.71749,0.71806,0.71863,0.71921,0.71978,0.72035,0.72093,0.7215,0.72207,0.72264,0.72321,0.72378,0.72436,0.72493,0.7255,0.72606,0.72663,0.7272,0.72777,0.72834,0.72891,0.72947,0.73004,0.73061,0.73117,0.73174,0.73231,0.73287,0.73344,0.734,0.73457,0.73513,0.73569,0.73626,0.73682,0.73738,0.73795,0.73851,0.73907,0.73963,0.74019,0.74075,0.74131,0.74187,0.74243,0.74299,0.74355,0.74411,0.74467,0.74522,0.74578,0.74634,0.7469,0.74745,0.74801,0.74856,0.74912,0.74967,0.75023,0.75078,0.75134,0.75189,0.75244,0.753,0.75355,0.7541,0.75465,0.7552,0.75576,0.75631,0.75686,0.75741,0.75796,0.75851,0.75906,0.7596,0.76015,0.7607,0.76125,0.7618,0.76234,0.76289,0.76344,0.76398,0.76453,0.76507,0.76562,0.76616,0.76671,0.76725,0.76779,0.76834,0.76888,0.76942,0.76996,0.7705,0.77105,
-0.77159,0.77213,0.77267,0.77321,0.77375,0.77429,0.77482,0.77536,0.7759,0.77644,0.77698,0.77751,0.77805,0.77859,0.77912,0.77966,0.78019,0.78073,0.78126,0.7818,0.78233,0.78286,0.7834,0.78393,0.78446,0.78499,0.78553,0.78606,0.78659,0.78712,0.78765,0.78818,0.78871,0.78924,0.78977,0.79029,0.79082,0.79135,0.79188,0.79241,0.79293,0.79346,0.79398,0.79451,0.79503,0.79556,0.79608,0.79661,0.79713,0.79766,0.79818,0.7987,0.79922,0.79975,0.80027,0.80079,0.80131,0.80183,0.80235,0.80287,0.80339,0.80391,0.80443,0.80495,0.80546,0.80598,0.8065,0.80701,0.80753,0.80805,0.80856,0.80908,0.80959,0.81011,0.81062,0.81114,0.81165,0.81216,0.81268,0.81319,0.8137,0.81421,0.81473,0.81524,0.81575,0.81626,0.81677,0.81728,0.81779,0.8183,0.8188,0.81931,0.81982,0.82033,0.82083,0.82134,0.82185,0.82235,0.82286,0.82337,
-0.82387,0.82437,0.82488,0.82538,0.82589,0.82639,0.82689,0.82739,0.8279,0.8284,0.8289,0.8294,0.8299,0.8304,0.8309,0.8314,0.8319,0.8324,0.8329,0.8334,0.83389,0.83439,0.83489,0.83538,0.83588,0.83638,0.83687,0.83737,0.83786,0.83836,0.83885,0.83934,0.83984,0.84033,0.84082,0.84131,0.84181,0.8423,0.84279,0.84328,0.84377,0.84426,0.84475,0.84524,0.84573,0.84622,0.84671,0.84719,0.84768,0.84817,0.84865,0.84914,0.84963,0.85011,0.8506,0.85108,0.85157,0.85205,0.85254,0.85302,0.8535,0.85399,0.85447,0.85495,0.85543,0.85591,0.85639,0.85687,0.85735,0.85783,0.85831,0.85879,0.85927,0.85975,0.86023,0.86071,0.86118,0.86166,0.86214,0.86261,0.86309,0.86356,0.86404,0.86451,0.86499,0.86546,0.86594,0.86641,0.86688,0.86736,0.86783,0.8683,0.86877,0.86924,0.86971,0.87018,0.87065,0.87112,0.87159,0.87206,
-0.87253,0.873,0.87347,0.87393,0.8744,0.87487,0.87533,0.8758,0.87627,0.87673,0.8772,0.87766,0.87813,0.87859,0.87905,0.87952,0.87998,0.88044,0.8809,0.88137,0.88183,0.88229,0.88275,0.88321,0.88367,0.88413,0.88459,0.88505,0.8855,0.88596,0.88642,0.88688,0.88733,0.88779,0.88825,0.8887,0.88916,0.88961,0.89007,0.89052,0.89098,0.89143,0.89189,0.89234,0.89279,0.89324,0.8937,0.89415,0.8946,0.89505,0.8955,0.89595,0.8964,0.89685,0.8973,0.89775,0.8982,0.89864,0.89909,0.89954,0.89999,0.90043,0.90088,0.90132,0.90177,0.90221,0.90266,0.9031,0.90355,0.90399,0.90443,0.90488,0.90532,0.90576,0.9062,0.90665,0.90709,0.90753,0.90797,0.90841,0.90885,0.90929,0.90973,0.91017,0.9106,0.91104,0.91148,0.91192,0.91235,0.91279,0.91323,0.91366,0.9141,0.91453,0.91497,0.9154,0.91584,0.91627,0.9167,0.91714,
-0.91757,0.918,0.91843,0.91886,0.9193,0.91973,0.92016,0.92059,0.92102,0.92145,0.92188,0.9223,0.92273,0.92316,0.92359,0.92402,0.92444,0.92487,0.9253,0.92572,0.92615,0.92657,0.927,0.92742,0.92785,0.92827,0.92869,0.92912,0.92954,0.92996,0.93038,0.9308,0.93123,0.93165,0.93207,0.93249,0.93291,0.93333,0.93375,0.93416,0.93458,0.935,0.93542,0.93584,0.93625,0.93667,0.93709,0.9375,0.93792,0.93833,0.93875,0.93916,0.93958,0.93999,0.9404,0.94082,0.94123,0.94164,0.94206,0.94247,0.94288,0.94329,0.9437,0.94411,0.94452,0.94493,0.94534,0.94575,0.94616,0.94657,0.94697,0.94738,0.94779,0.9482,0.9486,0.94901,0.94941,0.94982,0.95023,0.95063,0.95103,0.95144,0.95184,0.95225,0.95265,0.95305,0.95345,0.95386,0.95426,0.95466,0.95506,0.95546,0.95586,0.95626,0.95666,0.95706,0.95746,0.95786,0.95826,0.95865,
-0.95905,0.95945,0.95985,0.96024,0.96064,0.96103,0.96143,0.96182,0.96222,0.96261,0.96301,0.9634,0.9638,0.96419,0.96458,0.96497,0.96537,0.96576,0.96615,0.96654,0.96693,0.96732,0.96771,0.9681,0.96849,0.96888,0.96927,0.96966,0.97004,0.97043,0.97082,0.97121,0.97159,0.97198,0.97236,0.97275,0.97313,0.97352,0.9739,0.97429,0.97467,0.97506,0.97544,0.97582,0.9762,0.97659,0.97697,0.97735,0.97773,0.97811,0.97849,0.97887,0.97925,0.97963,0.98001,0.98039,0.98077,0.98115,0.98152,0.9819,0.98228,0.98266,0.98303,0.98341,0.98378,0.98416,0.98453,0.98491,0.98528,0.98566,0.98603,0.98641,0.98678,0.98715,0.98752,0.9879,0.98827,0.98864,0.98901,0.98938,0.98975,0.99012,0.99049,0.99086,0.99123,0.9916,0.99197,0.99234,0.9927,0.99307,0.99344,0.9938,0.99417,0.99454,0.9949,0.99527,0.99563,0.996,0.99636,0.99673,
-0.99709,0.99745,0.99782,0.99818,0.99854,0.9989,0.99927,0.99963,0.99999,1.00035,1.00071,1.00107,1.00143,1.00179,1.00215,1.00251,1.00287,1.00323,1.00358,1.00394,1.0043,1.00465,1.00501,1.00537,1.00572,1.00608,1.00643,1.00679,1.00714,1.0075,1.00785,1.00821,1.00856,1.00891,1.00927,1.00962,1.00997,1.01032,1.01067,1.01102,1.01137,1.01172,1.01208,1.01242,1.01277,1.01312,1.01347,1.01382,1.01417,1.01452,1.01486,1.01521,1.01556,1.0159,1.01625,1.0166,1.01694,1.01729,1.01763,1.01798,1.01832,1.01867,1.01901,1.01935,1.0197,1.02004,1.02038,1.02072,1.02107,1.02141,1.02175,1.02209,1.02243,1.02277,1.02311,1.02345,1.02379,1.02413,1.02447,1.0248,1.02514,1.02548,1.02582,1.02615,1.02649,1.02683,1.02716,1.0275,1.02783,1.02817,1.0285,1.02884,1.02917,1.02951,1.02984,1.03017,1.03051,1.03084,1.03117,1.0315,
-1.03183,1.03217,1.0325,1.03283,1.03316,1.03349,1.03382,1.03415,1.03448,1.03481,1.03513,1.03546,1.03579,1.03612,1.03645,1.03677,1.0371,1.03743,1.03775,1.03808,1.0384,1.03873,1.03905,1.03938,1.0397,1.04003,1.04035,1.04067,1.041,1.04132,1.04164,1.04196,1.04228,1.04261,1.04293,1.04325,1.04357,1.04389,1.04421,1.04453,1.04485,1.04517,1.04549,1.0458,1.04612,1.04644,1.04676,1.04707,1.04739,1.04771,1.04802,1.04834,1.04866,1.04897,1.04929,1.0496,1.04992,1.05023,1.05054,1.05086,1.05117,1.05148,1.0518,1.05211,1.05242,1.05273,1.05304,1.05336,1.05367,1.05398,1.05429,1.0546,1.05491,1.05522,1.05553,1.05583,1.05614,1.05645,1.05676,1.05707,1.05737,1.05768,1.05799,1.05829,1.0586,1.05891,1.05921,1.05952,1.05982,1.06013,1.06043,1.06073,1.06104,1.06134,1.06164,1.06195,1.06225,1.06255,1.06285,1.06316,
-1.06346,1.06376,1.06406,1.06436,1.06466,1.06496,1.06526,1.06556,1.06586,1.06616,1.06645,1.06675,1.06705,1.06735,1.06764,1.06794,1.06824,1.06853,1.06883,1.06913,1.06942,1.06972,1.07001,1.07031,1.0706,1.0709,1.07119,1.07148,1.07178,1.07207,1.07236,1.07265,1.07295,1.07324,1.07353,1.07382,1.07411,1.0744,1.07469,1.07498,1.07527,1.07556,1.07585,1.07614,1.07643,1.07672,1.077,1.07729,1.07758,1.07787,1.07815,1.07844,1.07873,1.07901,1.0793,1.07958,1.07987,1.08015,1.08044,1.08072,1.08101,1.08129,1.08158,1.08186,1.08214,1.08242,1.08271,1.08299,1.08327,1.08355,1.08383,1.08411,1.0844,1.08468,1.08496,1.08524,1.08552,1.08579,1.08607,1.08635,1.08663,1.08691,1.08719,1.08746,1.08774,1.08802,1.0883,1.08857,1.08885,1.08912,1.0894,1.08968,1.08995,1.09023,1.0905,1.09077,1.09105,1.09132,1.0916,1.09187,
-1.09214,1.09242,1.09269,1.09296,1.09323,1.0935,1.09377,1.09405,1.09432,1.09459,1.09486,1.09513,1.0954,1.09567,1.09594,1.0962,1.09647,1.09674,1.09701,1.09728,1.09754,1.09781,1.09808,1.09835,1.09861,1.09888,1.09914,1.09941,1.09967,1.09994,1.1002,1.10047,1.10073,1.101,1.10126,1.10153,1.10179,1.10205,1.10231,1.10258,1.10284,1.1031,1.10336,1.10362,1.10388,1.10415,1.10441,1.10467,1.10493,1.10519,1.10545,1.1057,1.10596,1.10622,1.10648,1.10674,1.107,1.10725,1.10751,1.10777,1.10803,1.10828,1.10854,1.1088,1.10905,1.10931,1.10956,1.10982,1.11007,1.11033,1.11058,1.11083,1.11109,1.11134,1.11159,1.11185,1.1121,1.11235,1.11261,1.11286,1.11311,1.11336,1.11361,1.11386,1.11411,1.11436,1.11461,1.11486,1.11511,1.11536,1.11561,1.11586,1.11611,1.11636,1.11661,1.11685,1.1171,1.11735,1.11759,1.11784,
-1.11809,1.11833,1.11858,1.11883,1.11907,1.11932,1.11956,1.11981,1.12005,1.1203,1.12054,1.12078,1.12103,1.12127,1.12151,1.12176,1.122,1.12224,1.12248,1.12272,1.12297,1.12321,1.12345,1.12369,1.12393,1.12417,1.12441,1.12465,1.12489,1.12513,1.12537,1.12561,1.12584,1.12608,1.12632,1.12656,1.1268,1.12703,1.12727,1.12751,1.12774,1.12798,1.12822,1.12845,1.12869,1.12892,1.12916,1.12939,1.12963,1.12986,1.1301,1.13033,1.13056,1.1308,1.13103,1.13126,1.13149,1.13173,1.13196,1.13219,1.13242,1.13265,1.13289,1.13312,1.13335,1.13358,1.13381,1.13404,1.13427,1.1345,1.13473,1.13495,1.13518,1.13541,1.13564,1.13587,1.1361,1.13632,1.13655,1.13678,1.137,1.13723,1.13746,1.13768,1.13791,1.13814,1.13836,1.13859,1.13881,1.13904,1.13926,1.13948,1.13971,1.13993,1.14016,1.14038,1.1406,1.14082,1.14105,1.14127,
-1.14149,1.14171,1.14194,1.14216,1.14238,1.1426,1.14282,1.14304,1.14326,1.14348,1.1437,1.14392,1.14414,1.14436,1.14458,1.1448,1.14501,1.14523,1.14545,1.14567,1.14588,1.1461,1.14632,1.14654,1.14675,1.14697,1.14718,1.1474,1.14762,1.14783,1.14805,1.14826,1.14848,1.14869,1.1489,1.14912,1.14933,1.14955,1.14976,1.14997,1.15019,1.1504,1.15061,1.15082,1.15103,1.15125,1.15146,1.15167,1.15188,1.15209,1.1523,1.15251,1.15272,1.15293,1.15314,1.15335,1.15356,1.15377,1.15398,1.15419,1.1544,1.1546,1.15481,1.15502,1.15523,1.15543,1.15564,1.15585,1.15605,1.15626,1.15647,1.15667,1.15688,1.15708,1.15729,1.15749,1.1577,1.1579,1.15811,1.15831,1.15852,1.15872,1.15892,1.15913,1.15933,1.15953,1.15974,1.15994,1.16014,1.16034,1.16054,1.16075,1.16095,1.16115,1.16135,1.16155,1.16175,1.16195,1.16215,1.16235,
-1.16255,1.16275,1.16295,1.16315,1.16335,1.16354,1.16374,1.16394,1.16414,1.16434,1.16453,1.16473,1.16493,1.16512,1.16532,1.16552,1.16571,1.16591,1.16611,1.1663,1.1665,1.16669,1.16689,1.16708,1.16728,1.16747,1.16766,1.16786,1.16805,1.16824,1.16844,1.16863,1.16882,1.16902,1.16921,1.1694,1.16959,1.16978,1.16998,1.17017,1.17036,1.17055,1.17074,1.17093,1.17112,1.17131,1.1715,1.17169,1.17188,1.17207,1.17226,1.17245,1.17264,1.17282,1.17301,1.1732,1.17339,1.17358,1.17376,1.17395,1.17414,1.17432,1.17451,1.1747,1.17488,1.17507,1.17526,1.17544,1.17563,1.17581,1.176,1.17618,1.17637,1.17655,1.17673,1.17692,1.1771,1.17729,1.17747,1.17765,1.17784,1.17802,1.1782,1.17838,1.17857,1.17875,1.17893,1.17911,1.17929,1.17947,1.17965,1.17983,1.18002,1.1802,1.18038,1.18056,1.18074,1.18092,1.18109,1.18127,
-1.18145,1.18163,1.18181,1.18199,1.18217,1.18234,1.18252,1.1827,1.18288,1.18305,1.18323,1.18341,1.18358,1.18376,1.18394,1.18411,1.18429,1.18447,1.18464,1.18482,1.18499,1.18517,1.18534,1.18552,1.18569,1.18586,1.18604,1.18621,1.18638,1.18656,1.18673,1.1869,1.18708,1.18725,1.18742,1.18759,1.18777,1.18794,1.18811,1.18828,1.18845,1.18862,1.18879,1.18896,1.18914,1.18931,1.18948,1.18965,1.18982,1.18998,1.19015,1.19032,1.19049,1.19066,1.19083,1.191,1.19117,1.19133,1.1915,1.19167,1.19184,1.192,1.19217,1.19234,1.19251,1.19267,1.19284,1.193,1.19317,1.19334,1.1935,1.19367,1.19383,1.194,1.19416,1.19433,1.19449,1.19466,1.19482,1.19498,1.19515,1.19531,1.19547,1.19564,1.1958,1.19596,1.19613,1.19629,1.19645,1.19661,1.19678,1.19694,1.1971,1.19726,1.19742,1.19758,1.19774,1.1979,1.19806,1.19823,
-1.19839,1.19855,1.19871,1.19886,1.19902,1.19918,1.19934,1.1995,1.19966,1.19982,1.19998,1.20014,1.20029,1.20045,1.20061,1.20077,1.20092,1.20108,1.20124,1.20139,1.20155,1.20171,1.20186,1.20202,1.20218,1.20233,1.20249,1.20264,1.2028,1.20295,1.20311,1.20326,1.20342,1.20357,1.20373,1.20388,1.20403,1.20419,1.20434,1.20449,1.20465,1.2048,1.20495,1.2051,1.20526,1.20541,1.20556,1.20571,1.20587,1.20602,1.20617,1.20632,1.20647,1.20662,1.20677,1.20692,1.20707,1.20722,1.20737,1.20752,1.20767,1.20782,1.20797,1.20812,1.20827,1.20842,1.20857,1.20872,1.20886,1.20901,1.20916,1.20931,1.20946,1.2096,1.20975,1.2099,1.21004,1.21019,1.21034,1.21048,1.21063,1.21078,1.21092,1.21107,1.21121,1.21136,1.21151,1.21165,1.2118,1.21194,1.21209,1.21223,1.21237,1.21252,1.21266,1.21281,1.21295,1.21309,1.21324,1.21338,
-1.21352,1.21367,1.21381,1.21395,1.21409,1.21424,1.21438,1.21452,1.21466,1.2148,1.21494,1.21509,1.21523,1.21537,1.21551,1.21565,1.21579,1.21593,1.21607,1.21621,1.21635,1.21649,1.21663,1.21677,1.21691,1.21705,1.21719,1.21732,1.21746,1.2176,1.21774,1.21788,1.21802,1.21815,1.21829,1.21843,1.21857,1.2187,1.21884,1.21898,1.21911,1.21925,1.21939,1.21952,1.21966,1.21979,1.21993,1.22007,1.2202,1.22034,1.22047,1.22061,1.22074,1.22088,1.22101,1.22114,1.22128,1.22141,1.22155,1.22168,1.22181,1.22195,1.22208,1.22221,1.22235,1.22248,1.22261,1.22274,1.22288,1.22301,1.22314,1.22327,1.2234,1.22354,1.22367,1.2238,1.22393,1.22406,1.22419,1.22432,1.22445,1.22458,1.22471,1.22484,1.22497,1.2251,1.22523,1.22536,1.22549,1.22562,1.22575,1.22588,1.22601,1.22614,1.22626,1.22639,1.22652,1.22665,1.22678,1.2269,
-1.22703,1.22716,1.22729,1.22741,1.22754,1.22767,1.22779,1.22792,1.22805,1.22817,1.2283,1.22842,1.22855,1.22868,1.2288,1.22893,1.22905,1.22918,1.2293,1.22943,1.22955,1.22968,1.2298,1.22992,1.23005,1.23017,1.2303,1.23042,1.23054,1.23067,1.23079,1.23091,1.23104,1.23116,1.23128,1.2314,1.23153,1.23165,1.23177,1.23189,1.23201,1.23213,1.23226,1.23238,1.2325,1.23262,1.23274,1.23286,1.23298,1.2331,1.23322,1.23334,1.23346,1.23358,1.2337,1.23382,1.23394,1.23406,1.23418,1.2343,1.23442,1.23454,1.23466,1.23477,1.23489,1.23501,1.23513,1.23525,1.23537,1.23548,1.2356,1.23572,1.23584,1.23595,1.23607,1.23619,1.2363,1.23642,1.23654,1.23665,1.23677,1.23688,1.237,1.23712,1.23723,1.23735,1.23746,1.23758,1.23769,1.23781,1.23792,1.23804,1.23815,1.23827,1.23838,1.2385,1.23861,1.23872,1.23884,1.23895,
-1.23906,1.23918,1.23929,1.2394,1.23952,1.23963,1.23974,1.23985,1.23997,1.24008,1.24019,1.2403,1.24042,1.24053,1.24064,1.24075,1.24086,1.24097,1.24108,1.2412,1.24131,1.24142,1.24153,1.24164,1.24175,1.24186,1.24197,1.24208,1.24219,1.2423,1.24241,1.24252,1.24263,1.24274,1.24285,1.24295,1.24306,1.24317,1.24328,1.24339,1.2435,1.2436,1.24371,1.24382,1.24393,1.24404,1.24414,1.24425,1.24436,1.24447,1.24457,1.24468,1.24479,1.24489,1.245,1.24511,1.24521,1.24532,1.24542,1.24553,1.24564,1.24574,1.24585,1.24595,1.24606,1.24616,1.24627,1.24637,1.24648,1.24658,1.24669,1.24679,1.2469,1.247,1.2471,1.24721,1.24731,1.24742,1.24752,1.24762,1.24773,1.24783,1.24793,1.24803,1.24814,1.24824,1.24834,1.24845,1.24855,1.24865,1.24875,1.24885,1.24896,1.24906,1.24916,1.24926,1.24936,1.24946,1.24956,1.24966,
-1.24977,1.24987,1.24997,1.25007,1.25017,1.25027,1.25037,1.25047,1.25057,1.25067,1.25077,1.25087,1.25097,1.25107,1.25117,1.25126,1.25136,1.25146,1.25156,1.25166,1.25176,1.25186,1.25195,1.25205,1.25215,1.25225,1.25235,1.25244,1.25254,1.25264,1.25274,1.25283,1.25293,1.25303,1.25312,1.25322,1.25332,1.25341,1.25351,1.25361,1.2537,1.2538,1.2539,1.25399,1.25409,1.25418,1.25428,1.25437,1.25447,1.25456,1.25466,1.25475,1.25485,1.25494,1.25504,1.25513,1.25523,1.25532,1.25542,1.25551,1.2556,1.2557,1.25579,1.25588,1.25598,1.25607,1.25616,1.25626,1.25635,1.25644,1.25654,1.25663,1.25672,1.25681,1.25691,1.257,1.25709,1.25718,1.25727,1.25737,1.25746,1.25755,1.25764,1.25773,1.25782,1.25791,1.25801,1.2581,1.25819,1.25828,1.25837,1.25846,1.25855,1.25864,1.25873,1.25882,1.25891,1.259,1.25909,1.25918,
-1.25927,1.25936,1.25945,1.25954,1.25963,1.25971,1.2598,1.25989,1.25998,1.26007,1.26016,1.26025,1.26033,1.26042,1.26051,1.2606,1.26069,1.26077,1.26086,1.26095,1.26104,1.26112,1.26121,1.2613,1.26139,1.26147,1.26156,1.26165,1.26173,1.26182,1.2619,1.26199,1.26208,1.26216,1.26225,1.26233,1.26242,1.26251,1.26259,1.26268,1.26276,1.26285,1.26293,1.26302,1.2631,1.26319,1.26327,1.26336,1.26344,1.26353,1.26361,1.26369,1.26378,1.26386,1.26395,1.26403,1.26411,1.2642,1.26428,1.26436,1.26445,1.26453,1.26461,1.2647,1.26478,1.26486,1.26494,1.26503,1.26511,1.26519,1.26527,1.26536,1.26544,1.26552,1.2656,1.26568,1.26577,1.26585,1.26593,1.26601,1.26609,1.26617,1.26625,1.26633,1.26642,1.2665,1.26658,1.26666,1.26674,1.26682,1.2669,1.26698,1.26706,1.26714,1.26722,1.2673,1.26738,1.26746,1.26754,1.26762,
-1.2677,1.26778,1.26785,1.26793,1.26801,1.26809,1.26817,1.26825,1.26833,1.26841,1.26848,1.26856,1.26864,1.26872,1.2688,1.26887,1.26895,1.26903,1.26911,1.26918,1.26926,1.26934,1.26942,1.26949,1.26957,1.26965,1.26972,1.2698,1.26988,1.26995,1.27003,1.27011,1.27018,1.27026,1.27034,1.27041,1.27049,1.27056,1.27064,1.27072,1.27079,1.27087,1.27094,1.27102,1.27109,1.27117,1.27124,1.27132,1.27139,1.27147,1.27154,1.27162,1.27169,1.27176,1.27184,1.27191,1.27199,1.27206,1.27214,1.27221,1.27228,1.27236,1.27243,1.2725,1.27258,1.27265,1.27272,1.2728,1.27287,1.27294,1.27301,1.27309,1.27316,1.27323,1.27331,1.27338,1.27345,1.27352,1.27359,1.27367,1.27374,1.27381,1.27388,1.27395,1.27403,1.2741,1.27417,1.27424,1.27431,1.27438,1.27445,1.27452,1.27459,1.27467,1.27474,1.27481,1.27488,1.27495,1.27502,1.27509,
-1.27516,1.27523,1.2753,1.27537,1.27544,1.27551,1.27558,1.27565,1.27572,1.27579,1.27586,1.27593,1.27599,1.27606,1.27613,1.2762,1.27627,1.27634,1.27641,1.27648,1.27654,1.27661,1.27668,1.27675,1.27682,1.27689,1.27695,1.27702,1.27709,1.27716,1.27723,1.27729,1.27736,1.27743,1.27749,1.27756,1.27763,1.2777,1.27776,1.27783,1.2779,1.27796,1.27803,1.2781,1.27816,1.27823,1.2783,1.27836,1.27843,1.27849,1.27856,1.27863,1.27869,1.27876,1.27882,1.27889,1.27896,1.27902,1.27909,1.27915,1.27922,1.27928,1.27935,1.27941,1.27948,1.27954,1.27961,1.27967,1.27974,1.2798,1.27986,1.27993,1.27999,1.28006,1.28012,1.28018,1.28025,1.28031,1.28038,1.28044,1.2805,1.28057,1.28063,1.28069,1.28076,1.28082,1.28088,1.28095,1.28101,1.28107,1.28114,1.2812,1.28126,1.28132,1.28139,1.28145,1.28151,1.28157,1.28164,1.2817,
-1.28176,1.28182,1.28188,1.28194,1.28201,1.28207,1.28213,1.28219,1.28225,1.28231,1.28238,1.28244,1.2825,1.28256,1.28262,1.28268,1.28274,1.2828,1.28286,1.28292,1.28298,1.28304,1.2831,1.28317,1.28323,1.28329,1.28335,1.28341,1.28347,1.28353,1.28359,1.28364,1.2837,1.28376,1.28382,1.28388,1.28394,1.284,1.28406,1.28412,1.28418,1.28424,1.2843,1.28436,1.28441,1.28447,1.28453,1.28459,1.28465,1.28471,1.28477,1.28482,1.28488,1.28494,1.285,1.28506,1.28511,1.28517,1.28523,1.28529,1.28534,1.2854,1.28546,1.28552,1.28557,1.28563,1.28569,1.28575,1.2858,1.28586,1.28592,1.28597,1.28603,1.28609,1.28614,1.2862,1.28626,1.28631,1.28637,1.28643,1.28648,1.28654,1.28659,1.28665,1.28671,1.28676,1.28682,1.28687,1.28693,1.28698,1.28704,1.28709,1.28715,1.28721,1.28726,1.28732,1.28737,1.28743,1.28748,1.28754,
-1.28759,1.28764,1.2877,1.28775,1.28781,1.28786,1.28792,1.28797,1.28803,1.28808,1.28813,1.28819,1.28824,1.2883,1.28835,1.2884,1.28846,1.28851,1.28856,1.28862,1.28867,1.28872,1.28878,1.28883,1.28888,1.28894,1.28899,1.28904,1.2891,1.28915,1.2892,1.28925,1.28931,1.28936,1.28941,1.28946,1.28952,1.28957,1.28962,1.28967,1.28973,1.28978,1.28983,1.28988,1.28993,1.28999,1.29004,1.29009,1.29014,1.29019,1.29024,1.29029,1.29035,1.2904,1.29045,1.2905,1.29055,1.2906,1.29065,1.2907,1.29075,1.29081,1.29086,1.29091,1.29096,1.29101,1.29106,1.29111,1.29116,1.29121,1.29126,1.29131,1.29136,1.29141,1.29146,1.29151,1.29156,1.29161,1.29166,1.29171,1.29176,1.29181,1.29186,1.29191,1.29195,1.292,1.29205,1.2921,1.29215,1.2922,1.29225,1.2923,1.29235,1.2924,1.29244,1.29249,1.29254,1.29259,1.29264,1.29269,
-1.29274,1.29278,1.29283,1.29288,1.29293,1.29298,1.29302,1.29307,1.29312,1.29317,1.29321,1.29326,1.29331,1.29336,1.29341,1.29345,1.2935,1.29355,1.29359,1.29364,1.29369,1.29374,1.29378,1.29383,1.29388,1.29392,1.29397,1.29402,1.29406,1.29411,1.29416,1.2942,1.29425,1.2943,1.29434,1.29439,1.29443,1.29448,1.29453,1.29457,1.29462,1.29466,1.29471,1.29476,1.2948,1.29485,1.29489,1.29494,1.29498,1.29503,1.29507,1.29512,1.29517,1.29521,1.29526,1.2953,1.29535,1.29539,1.29544,1.29548,1.29552,1.29557,1.29561,1.29566,1.2957,1.29575,1.29579,1.29584,1.29588,1.29593,1.29597,1.29601,1.29606,1.2961,1.29615,1.29619,1.29623,1.29628,1.29632,1.29637,1.29641,1.29645,1.2965,1.29654,1.29658,1.29663,1.29667,1.29671,1.29676,1.2968,1.29684,1.29689,1.29693,1.29697,1.29701,1.29706,1.2971,1.29714,1.29719,1.29723,
-1.29727,1.29731,1.29736,1.2974,1.29744,1.29748,1.29752,1.29757,1.29761,1.29765,1.29769,1.29774,1.29778,1.29782,1.29786,1.2979,1.29794,1.29799,1.29803,1.29807,1.29811,1.29815,1.29819,1.29823,1.29828,1.29832,1.29836,1.2984,1.29844,1.29848,1.29852,1.29856,1.2986,1.29865,1.29869,1.29873,1.29877,1.29881,1.29885,1.29889,1.29893,1.29897,1.29901,1.29905,1.29909,1.29913,1.29917,1.29921,1.29925,1.29929,1.29933,1.29937,1.29941,1.29945,1.29949,1.29953,1.29957,1.29961,1.29965,1.29969,1.29973,1.29977,1.29981,1.29985,1.29988,1.29992,1.29996,1.3,1.30004,1.30008,1.30012,1.30016,1.3002,1.30024,1.30027,1.30031,1.30035,1.30039,1.30043,1.30047,1.30051,1.30054,1.30058,1.30062,1.30066,1.3007,1.30074,1.30077,1.30081,1.30085,1.30089,1.30093,1.30096,1.301,1.30104,1.30108,1.30111,1.30115,1.30119,1.30123};
-
-constexpr double stored_gamma_values_n4[] = {0.88623,0.88622,0.8862,0.88618,0.88615,0.88612,0.88608,0.88604,0.886,0.88596,0.88591,0.88586,0.88581,0.88576,0.88571,0.88565,0.88559,0.88553,0.88547,0.88541,0.88534,0.88528,0.88521,0.88514,0.88507,0.88499,0.88492,0.88484,0.88477,0.88469,0.88461,0.88453,0.88444,0.88436,0.88428,0.88419,0.8841,0.88401,0.88392,0.88383,0.88374,0.88365,0.88356,0.88346,0.88336,0.88327,0.88317,0.88307,0.88297,0.88287,0.88277,0.88266,0.88256,0.88245,0.88235,0.88224,0.88213,0.88203,0.88192,0.88181,0.88169,0.88158,0.88147,0.88136,0.88124,0.88113,0.88101,0.88089,0.88077,0.88066,0.88054,0.88042,0.8803,0.88017,0.88005,0.87993,0.8798,0.87968,0.87955,0.87943,0.8793,0.87917,0.87904,0.87891,0.87878,0.87865,0.87852,0.87839,0.87826,0.87812,0.87799,0.87786,0.87772,0.87758,0.87745,0.87731,0.87717,0.87703,0.8769,0.87676,
-0.87662,0.87647,0.87633,0.87619,0.87605,0.8759,0.87576,0.87562,0.87547,0.87532,0.87518,0.87503,0.87488,0.87474,0.87459,0.87444,0.87429,0.87414,0.87399,0.87384,0.87368,0.87353,0.87338,0.87322,0.87307,0.87292,0.87276,0.8726,0.87245,0.87229,0.87213,0.87198,0.87182,0.87166,0.8715,0.87134,0.87118,0.87102,0.87086,0.8707,0.87053,0.87037,0.87021,0.87004,0.86988,0.86972,0.86955,0.86938,0.86922,0.86905,0.86889,0.86872,0.86855,0.86838,0.86821,0.86804,0.86787,0.8677,0.86753,0.86736,0.86719,0.86702,0.86685,0.86667,0.8665,0.86633,0.86615,0.86598,0.8658,0.86563,0.86545,0.86528,0.8651,0.86492,0.86475,0.86457,0.86439,0.86421,0.86403,0.86386,0.86368,0.8635,0.86332,0.86313,0.86295,0.86277,0.86259,0.86241,0.86222,0.86204,0.86186,0.86167,0.86149,0.86131,0.86112,0.86094,0.86075,0.86056,0.86038,0.86019,
-0.86,0.85982,0.85963,0.85944,0.85925,0.85906,0.85887,0.85868,0.85849,0.8583,0.85811,0.85792,0.85773,0.85754,0.85735,0.85716,0.85696,0.85677,0.85658,0.85638,0.85619,0.856,0.8558,0.85561,0.85541,0.85522,0.85502,0.85482,0.85463,0.85443,0.85423,0.85404,0.85384,0.85364,0.85344,0.85324,0.85304,0.85285,0.85265,0.85245,0.85225,0.85205,0.85185,0.85164,0.85144,0.85124,0.85104,0.85084,0.85064,0.85043,0.85023,0.85003,0.84982,0.84962,0.84941,0.84921,0.84901,0.8488,0.8486,0.84839,0.84818,0.84798,0.84777,0.84757,0.84736,0.84715,0.84694,0.84674,0.84653,0.84632,0.84611,0.8459,0.84569,0.84549,0.84528,0.84507,0.84486,0.84465,0.84444,0.84423,0.84401,0.8438,0.84359,0.84338,0.84317,0.84296,0.84274,0.84253,0.84232,0.8421,0.84189,0.84168,0.84146,0.84125,0.84104,0.84082,0.84061,0.84039,0.84018,0.83996,
-0.83974,0.83953,0.83931,0.8391,0.83888,0.83866,0.83845,0.83823,0.83801,0.83779,0.83757,0.83736,0.83714,0.83692,0.8367,0.83648,0.83626,0.83604,0.83582,0.8356,0.83538,0.83516,0.83494,0.83472,0.8345,0.83428,0.83406,0.83384,0.83361,0.83339,0.83317,0.83295,0.83273,0.8325,0.83228,0.83206,0.83183,0.83161,0.83139,0.83116,0.83094,0.83071,0.83049,0.83026,0.83004,0.82981,0.82959,0.82936,0.82914,0.82891,0.82869,0.82846,0.82823,0.82801,0.82778,0.82755,0.82733,0.8271,0.82687,0.82664,0.82641,0.82619,0.82596,0.82573,0.8255,0.82527,0.82504,0.82481,0.82458,0.82436,0.82413,0.8239,0.82367,0.82344,0.82321,0.82298,0.82274,0.82251,0.82228,0.82205,0.82182,0.82159,0.82136,0.82113,0.82089,0.82066,0.82043,0.8202,0.81996,0.81973,0.8195,0.81926,0.81903,0.8188,0.81856,0.81833,0.8181,0.81786,0.81763,0.81739,
-0.81716,0.81693,0.81669,0.81646,0.81622,0.81599,0.81575,0.81551,0.81528,0.81504,0.81481,0.81457,0.81433,0.8141,0.81386,0.81363,0.81339,0.81315,0.81291,0.81268,0.81244,0.8122,0.81196,0.81173,0.81149,0.81125,0.81101,0.81077,0.81054,0.8103,0.81006,0.80982,0.80958,0.80934,0.8091,0.80886,0.80862,0.80838,0.80814,0.8079,0.80766,0.80742,0.80718,0.80694,0.8067,0.80646,0.80622,0.80598,0.80574,0.8055,0.80526,0.80501,0.80477,0.80453,0.80429,0.80405,0.80381,0.80356,0.80332,0.80308,0.80284,0.80259,0.80235,0.80211,0.80187,0.80162,0.80138,0.80114,0.80089,0.80065,0.80041,0.80016,0.79992,0.79967,0.79943,0.79919,0.79894,0.7987,0.79845,0.79821,0.79796,0.79772,0.79747,0.79723,0.79698,0.79674,0.79649,0.79625,0.796,0.79576,0.79551,0.79526,0.79502,0.79477,0.79453,0.79428,0.79403,0.79379,0.79354,0.79329,
-0.79305,0.7928,0.79255,0.79231,0.79206,0.79181,0.79156,0.79132,0.79107,0.79082,0.79057,0.79033,0.79008,0.78983,0.78958,0.78933,0.78909,0.78884,0.78859,0.78834,0.78809,0.78784,0.7876,0.78735,0.7871,0.78685,0.7866,0.78635,0.7861,0.78585,0.7856,0.78535,0.7851,0.78485,0.7846,0.78435,0.7841,0.78385,0.7836,0.78335,0.7831,0.78285,0.7826,0.78235,0.7821,0.78185,0.7816,0.78135,0.7811,0.78085,0.7806,0.78034,0.78009,0.77984,0.77959,0.77934,0.77909,0.77884,0.77858,0.77833,0.77808,0.77783,0.77758,0.77732,0.77707,0.77682,0.77657,0.77632,0.77606,0.77581,0.77556,0.77531,0.77505,0.7748,0.77455,0.77429,0.77404,0.77379,0.77354,0.77328,0.77303,0.77278,0.77252,0.77227,0.77202,0.77176,0.77151,0.77126,0.771,0.77075,0.77049,0.77024,0.76999,0.76973,0.76948,0.76922,0.76897,0.76872,0.76846,0.76821,
-0.76795,0.7677,0.76744,0.76719,0.76693,0.76668,0.76642,0.76617,0.76592,0.76566,0.76541,0.76515,0.7649,0.76464,0.76438,0.76413,0.76387,0.76362,0.76336,0.76311,0.76285,0.7626,0.76234,0.76209,0.76183,0.76157,0.76132,0.76106,0.76081,0.76055,0.7603,0.76004,0.75978,0.75953,0.75927,0.75901,0.75876,0.7585,0.75825,0.75799,0.75773,0.75748,0.75722,0.75696,0.75671,0.75645,0.75619,0.75594,0.75568,0.75542,0.75517,0.75491,0.75465,0.75439,0.75414,0.75388,0.75362,0.75337,0.75311,0.75285,0.75259,0.75234,0.75208,0.75182,0.75156,0.75131,0.75105,0.75079,0.75053,0.75028,0.75002,0.74976,0.7495,0.74925,0.74899,0.74873,0.74847,0.74821,0.74796,0.7477,0.74744,0.74718,0.74692,0.74667,0.74641,0.74615,0.74589,0.74563,0.74538,0.74512,0.74486,0.7446,0.74434,0.74408,0.74383,0.74357,0.74331,0.74305,0.74279,0.74253,
-0.74227,0.74202,0.74176,0.7415,0.74124,0.74098,0.74072,0.74046,0.7402,0.73994,0.73969,0.73943,0.73917,0.73891,0.73865,0.73839,0.73813,0.73787,0.73761,0.73735,0.7371,0.73684,0.73658,0.73632,0.73606,0.7358,0.73554,0.73528,0.73502,0.73476,0.7345,0.73424,0.73398,0.73372,0.73347,0.73321,0.73295,0.73269,0.73243,0.73217,0.73191,0.73165,0.73139,0.73113,0.73087,0.73061,0.73035,0.73009,0.72983,0.72957,0.72931,0.72905,0.72879,0.72853,0.72827,0.72801,0.72775,0.72749,0.72723,0.72697,0.72671,0.72645,0.72619,0.72593,0.72567,0.72541,0.72515,0.72489,0.72463,0.72437,0.72412,0.72386,0.7236,0.72334,0.72307,0.72281,0.72255,0.72229,0.72203,0.72177,0.72151,0.72125,0.72099,0.72073,0.72047,0.72021,0.71995,0.71969,0.71943,0.71917,0.71891,0.71865,0.71839,0.71813,0.71787,0.71761,0.71735,0.71709,0.71683,0.71657,
-0.71631,0.71605,0.71579,0.71553,0.71527,0.71501,0.71475,0.71449,0.71423,0.71397,0.71371,0.71345,0.71319,0.71293,0.71267,0.71241,0.71215,0.71189,0.71163,0.71137,0.71111,0.71085,0.71059,0.71033,0.71007,0.70981,0.70954,0.70928,0.70902,0.70876,0.7085,0.70824,0.70798,0.70772,0.70746,0.7072,0.70694,0.70668,0.70642,0.70616,0.7059,0.70564,0.70538,0.70512,0.70486,0.7046,0.70434,0.70408,0.70382,0.70356,0.7033,0.70304,0.70278,0.70252,0.70226,0.702,0.70174,0.70148,0.70122,0.70096,0.7007,0.70044,0.70018,0.69992,0.69966,0.6994,0.69914,0.69888,0.69862,0.69836,0.6981,0.69784,0.69758,0.69732,0.69706,0.6968,0.69654,0.69628,0.69602,0.69576,0.6955,0.69524,0.69498,0.69472,0.69446,0.6942,0.69394,0.69368,0.69342,0.69316,0.6929,0.69264,0.69238,0.69212,0.69186,0.6916,0.69134,0.69108,0.69082,0.69056,
-0.6903,0.69004,0.68978,0.68952,0.68926,0.689,0.68874,0.68848,0.68822,0.68796,0.6877,0.68744,0.68718,0.68692,0.68666,0.6864,0.68614,0.68588,0.68562,0.68537,0.68511,0.68485,0.68459,0.68433,0.68407,0.68381,0.68355,0.68329,0.68303,0.68277,0.68251,0.68225,0.68199,0.68173,0.68148,0.68122,0.68096,0.6807,0.68044,0.68018,0.67992,0.67966,0.6794,0.67914,0.67888,0.67862,0.67837,0.67811,0.67785,0.67759,0.67733,0.67707,0.67681,0.67655,0.67629,0.67604,0.67578,0.67552,0.67526,0.675,0.67474,0.67448,0.67422,0.67397,0.67371,0.67345,0.67319,0.67293,0.67267,0.67242,0.67216,0.6719,0.67164,0.67138,0.67112,0.67086,0.67061,0.67035,0.67009,0.66983,0.66957,0.66932,0.66906,0.6688,0.66854,0.66828,0.66802,0.66777,0.66751,0.66725,0.66699,0.66673,0.66648,0.66622,0.66596,0.6657,0.66545,0.66519,0.66493,0.66467,
-0.66441,0.66416,0.6639,0.66364,0.66338,0.66313,0.66287,0.66261,0.66235,0.6621,0.66184,0.66158,0.66132,0.66107,0.66081,0.66055,0.6603,0.66004,0.65978,0.65952,0.65927,0.65901,0.65875,0.6585,0.65824,0.65798,0.65772,0.65747,0.65721,0.65695,0.6567,0.65644,0.65618,0.65593,0.65567,0.65541,0.65516,0.6549,0.65464,0.65439,0.65413,0.65387,0.65362,0.65336,0.6531,0.65285,0.65259,0.65234,0.65208,0.65182,0.65157,0.65131,0.65106,0.6508,0.65054,0.65029,0.65003,0.64978,0.64952,0.64926,0.64901,0.64875,0.6485,0.64824,0.64798,0.64773,0.64747,0.64722,0.64696,0.64671,0.64645,0.6462,0.64594,0.64568,0.64543,0.64517,0.64492,0.64466,0.64441,0.64415,0.6439,0.64364,0.64339,0.64313,0.64288,0.64262,0.64237,0.64211,0.64186,0.6416,0.64135,0.64109,0.64084,0.64059,0.64033,0.64008,0.63982,0.63957,0.63931,0.63906,
-0.6388,0.63855,0.6383,0.63804,0.63779,0.63753,0.63728,0.63702,0.63677,0.63652,0.63626,0.63601,0.63575,0.6355,0.63525,0.63499,0.63474,0.63449,0.63423,0.63398,0.63372,0.63347,0.63322,0.63296,0.63271,0.63246,0.6322,0.63195,0.6317,0.63144,0.63119,0.63094,0.63069,0.63043,0.63018,0.62993,0.62967,0.62942,0.62917,0.62891,0.62866,0.62841,0.62816,0.6279,0.62765,0.6274,0.62715,0.62689,0.62664,0.62639,0.62614,0.62588,0.62563,0.62538,0.62513,0.62488,0.62462,0.62437,0.62412,0.62387,0.62362,0.62336,0.62311,0.62286,0.62261,0.62236,0.62211,0.62185,0.6216,0.62135,0.6211,0.62085,0.6206,0.62035,0.62009,0.61984,0.61959,0.61934,0.61909,0.61884,0.61859,0.61834,0.61809,0.61784,0.61758,0.61733,0.61708,0.61683,0.61658,0.61633,0.61608,0.61583,0.61558,0.61533,0.61508,0.61483,0.61458,0.61433,0.61408,0.61383,
-0.61358,0.61333,0.61308,0.61283,0.61258,0.61233,0.61208,0.61183,0.61158,0.61133,0.61108,0.61083,0.61058,0.61033,0.61008,0.60983,0.60958,0.60934,0.60909,0.60884,0.60859,0.60834,0.60809,0.60784,0.60759,0.60734,0.60709,0.60685,0.6066,0.60635,0.6061,0.60585,0.6056,0.60535,0.60511,0.60486,0.60461,0.60436,0.60411,0.60386,0.60362,0.60337,0.60312,0.60287,0.60263,0.60238,0.60213,0.60188,0.60163,0.60139,0.60114,0.60089,0.60064,0.6004,0.60015,0.5999,0.59965,0.59941,0.59916,0.59891,0.59867,0.59842,0.59817,0.59793,0.59768,0.59743,0.59718,0.59694,0.59669,0.59644,0.5962,0.59595,0.59571,0.59546,0.59521,0.59497,0.59472,0.59447,0.59423,0.59398,0.59374,0.59349,0.59324,0.593,0.59275,0.59251,0.59226,0.59202,0.59177,0.59152,0.59128,0.59103,0.59079,0.59054,0.5903,0.59005,0.58981,0.58956,0.58932,0.58907,
-0.58883,0.58858,0.58834,0.58809,0.58785,0.5876,0.58736,0.58711,0.58687,0.58663,0.58638,0.58614,0.58589,0.58565,0.5854,0.58516,0.58492,0.58467,0.58443,0.58418,0.58394,0.5837,0.58345,0.58321,0.58297,0.58272,0.58248,0.58224,0.58199,0.58175,0.58151,0.58126,0.58102,0.58078,0.58053,0.58029,0.58005,0.5798,0.57956,0.57932,0.57908,0.57883,0.57859,0.57835,0.57811,0.57786,0.57762,0.57738,0.57714,0.57689,0.57665,0.57641,0.57617,0.57593,0.57568,0.57544,0.5752,0.57496,0.57472,0.57447,0.57423,0.57399,0.57375,0.57351,0.57327,0.57303,0.57279,0.57254,0.5723,0.57206,0.57182,0.57158,0.57134,0.5711,0.57086,0.57062,0.57038,0.57014,0.5699,0.56965,0.56941,0.56917,0.56893,0.56869,0.56845,0.56821,0.56797,0.56773,0.56749,0.56725,0.56701,0.56677,0.56653,0.56629,0.56606,0.56582,0.56558,0.56534,0.5651,0.56486,
-0.56462,0.56438,0.56414,0.5639,0.56366,0.56342,0.56319,0.56295,0.56271,0.56247,0.56223,0.56199,0.56175,0.56151,0.56128,0.56104,0.5608,0.56056,0.56032,0.56009,0.55985,0.55961,0.55937,0.55913,0.5589,0.55866,0.55842,0.55818,0.55795,0.55771,0.55747,0.55723,0.557,0.55676,0.55652,0.55628,0.55605,0.55581,0.55557,0.55534,0.5551,0.55486,0.55463,0.55439,0.55415,0.55392,0.55368,0.55344,0.55321,0.55297,0.55274,0.5525,0.55226,0.55203,0.55179,0.55156,0.55132,0.55108,0.55085,0.55061,0.55038,0.55014,0.54991,0.54967,0.54944,0.5492,0.54897,0.54873,0.5485,0.54826,0.54803,0.54779,0.54756,0.54732,0.54709,0.54685,0.54662,0.54638,0.54615,0.54591,0.54568,0.54545,0.54521,0.54498,0.54474,0.54451,0.54428,0.54404,0.54381,0.54357,0.54334,0.54311,0.54287,0.54264,0.54241,0.54217,0.54194,0.54171,0.54147,0.54124,
-0.54101,0.54077,0.54054,0.54031,0.54008,0.53984,0.53961,0.53938,0.53915,0.53891,0.53868,0.53845,0.53822,0.53798,0.53775,0.53752,0.53729,0.53706,0.53682,0.53659,0.53636,0.53613,0.5359,0.53566,0.53543,0.5352,0.53497,0.53474,0.53451,0.53428,0.53405,0.53381,0.53358,0.53335,0.53312,0.53289,0.53266,0.53243,0.5322,0.53197,0.53174,0.53151,0.53128,0.53105,0.53082,0.53059,0.53036,0.53013,0.5299,0.52967,0.52944,0.52921,0.52898,0.52875,0.52852,0.52829,0.52806,0.52783,0.5276,0.52737,0.52714,0.52691,0.52668,0.52646,0.52623,0.526,0.52577,0.52554,0.52531,0.52508,0.52486,0.52463,0.5244,0.52417,0.52394,0.52371,0.52349,0.52326,0.52303,0.5228,0.52257,0.52235,0.52212,0.52189,0.52166,0.52144,0.52121,0.52098,0.52075,0.52053,0.5203,0.52007,0.51985,0.51962,0.51939,0.51916,0.51894,0.51871,0.51848,0.51826,
-0.51803,0.5178,0.51758,0.51735,0.51713,0.5169,0.51667,0.51645,0.51622,0.516,0.51577,0.51554,0.51532,0.51509,0.51487,0.51464,0.51442,0.51419,0.51397,0.51374,0.51352,0.51329,0.51307,0.51284,0.51262,0.51239,0.51217,0.51194,0.51172,0.51149,0.51127,0.51104,0.51082,0.51059,0.51037,0.51015,0.50992,0.5097,0.50947,0.50925,0.50903,0.5088,0.50858,0.50836,0.50813,0.50791,0.50768,0.50746,0.50724,0.50701,0.50679,0.50657,0.50635,0.50612,0.5059,0.50568,0.50545,0.50523,0.50501,0.50479,0.50456,0.50434,0.50412,0.5039,0.50367,0.50345,0.50323,0.50301,0.50279,0.50256,0.50234,0.50212,0.5019,0.50168,0.50146,0.50124,0.50101,0.50079,0.50057,0.50035,0.50013,0.49991,0.49969,0.49947,0.49925,0.49902,0.4988,0.49858,0.49836,0.49814,0.49792,0.4977,0.49748,0.49726,0.49704,0.49682,0.4966,0.49638,0.49616,0.49594,
-0.49572,0.4955,0.49528,0.49506,0.49484,0.49462,0.4944,0.49419,0.49397,0.49375,0.49353,0.49331,0.49309,0.49287,0.49265,0.49243,0.49222,0.492,0.49178,0.49156,0.49134,0.49112,0.49091,0.49069,0.49047,0.49025,0.49003,0.48981,0.4896,0.48938,0.48916,0.48894,0.48873,0.48851,0.48829,0.48807,0.48786,0.48764,0.48742,0.48721,0.48699,0.48677,0.48656,0.48634,0.48612,0.48591,0.48569,0.48547,0.48526,0.48504,0.48482,0.48461,0.48439,0.48417,0.48396,0.48374,0.48353,0.48331,0.4831,0.48288,0.48266,0.48245,0.48223,0.48202,0.4818,0.48159,0.48137,0.48116,0.48094,0.48073,0.48051,0.4803,0.48008,0.47987,0.47965,0.47944,0.47922,0.47901,0.4788,0.47858,0.47837,0.47815,0.47794,0.47773,0.47751,0.4773,0.47708,0.47687,0.47666,0.47644,0.47623,0.47602,0.4758,0.47559,0.47538,0.47516,0.47495,0.47474,0.47452,0.47431,
-0.4741,0.47389,0.47367,0.47346,0.47325,0.47304,0.47282,0.47261,0.4724,0.47219,0.47197,0.47176,0.47155,0.47134,0.47113,0.47091,0.4707,0.47049,0.47028,0.47007,0.46986,0.46965,0.46943,0.46922,0.46901,0.4688,0.46859,0.46838,0.46817,0.46796,0.46775,0.46754,0.46733,0.46712,0.46691,0.4667,0.46648,0.46627,0.46606,0.46585,0.46564,0.46543,0.46522,0.46501,0.46481,0.4646,0.46439,0.46418,0.46397,0.46376,0.46355,0.46334,0.46313,0.46292,0.46271,0.4625,0.46229,0.46209,0.46188,0.46167,0.46146,0.46125,0.46104,0.46083,0.46063,0.46042,0.46021,0.46,0.45979,0.45959,0.45938,0.45917,0.45896,0.45875,0.45855,0.45834,0.45813,0.45792,0.45772,0.45751,0.4573,0.4571,0.45689,0.45668,0.45648,0.45627,0.45606,0.45585,0.45565,0.45544,0.45524,0.45503,0.45482,0.45462,0.45441,0.4542,0.454,0.45379,0.45359,0.45338,
-0.45317,0.45297,0.45276,0.45256,0.45235,0.45215,0.45194,0.45174,0.45153,0.45133,0.45112,0.45092,0.45071,0.45051,0.4503,0.4501,0.44989,0.44969,0.44948,0.44928,0.44907,0.44887,0.44867,0.44846,0.44826,0.44805,0.44785,0.44765,0.44744,0.44724,0.44704,0.44683,0.44663,0.44642,0.44622,0.44602,0.44582,0.44561,0.44541,0.44521,0.445,0.4448,0.4446,0.44439,0.44419,0.44399,0.44379,0.44358,0.44338,0.44318,0.44298,0.44278,0.44257,0.44237,0.44217,0.44197,0.44177,0.44156,0.44136,0.44116,0.44096,0.44076,0.44056,0.44036,0.44015,0.43995,0.43975,0.43955,0.43935,0.43915,0.43895,0.43875,0.43855,0.43835,0.43815,0.43795,0.43775,0.43754,0.43734,0.43714,0.43694,0.43674,0.43654,0.43634,0.43614,0.43595,0.43575,0.43555,0.43535,0.43515,0.43495,0.43475,0.43455,0.43435,0.43415,0.43395,0.43375,0.43355,0.43336,0.43316,
-0.43296,0.43276,0.43256,0.43236,0.43216,0.43197,0.43177,0.43157,0.43137,0.43117,0.43098,0.43078,0.43058,0.43038,0.43018,0.42999,0.42979,0.42959,0.42939,0.4292,0.429,0.4288,0.42861,0.42841,0.42821,0.42801,0.42782,0.42762,0.42742,0.42723,0.42703,0.42683,0.42664,0.42644,0.42625,0.42605,0.42585,0.42566,0.42546,0.42527,0.42507,0.42487,0.42468,0.42448,0.42429,0.42409,0.4239,0.4237,0.42351,0.42331,0.42311,0.42292,0.42272,0.42253,0.42234,0.42214,0.42195,0.42175,0.42156,0.42136,0.42117,0.42097,0.42078,0.42058,0.42039,0.4202,0.42,0.41981,0.41961,0.41942,0.41923,0.41903,0.41884,0.41865,0.41845,0.41826,0.41807,0.41787,0.41768,0.41749,0.41729,0.4171,0.41691,0.41672,0.41652,0.41633,0.41614,0.41595,0.41575,0.41556,0.41537,0.41518,0.41498,0.41479,0.4146,0.41441,0.41422,0.41402,0.41383,0.41364,
-0.41345,0.41326,0.41307,0.41287,0.41268,0.41249,0.4123,0.41211,0.41192,0.41173,0.41154,0.41135,0.41116,0.41097,0.41077,0.41058,0.41039,0.4102,0.41001,0.40982,0.40963,0.40944,0.40925,0.40906,0.40887,0.40868,0.40849,0.4083,0.40811,0.40792,0.40773,0.40755,0.40736,0.40717,0.40698,0.40679,0.4066,0.40641,0.40622,0.40603,0.40584,0.40566,0.40547,0.40528,0.40509,0.4049,0.40471,0.40452,0.40434,0.40415,0.40396,0.40377,0.40358,0.4034,0.40321,0.40302,0.40283,0.40265,0.40246,0.40227,0.40208,0.4019,0.40171,0.40152,0.40133,0.40115,0.40096,0.40077,0.40059,0.4004,0.40021,0.40003,0.39984,0.39965,0.39947,0.39928,0.3991,0.39891,0.39872,0.39854,0.39835,0.39817,0.39798,0.39779,0.39761,0.39742,0.39724,0.39705,0.39687,0.39668,0.3965,0.39631,0.39613,0.39594,0.39576,0.39557,0.39539,0.3952,0.39502,0.39483,
-0.39465,0.39446,0.39428,0.39409,0.39391,0.39373,0.39354,0.39336,0.39317,0.39299,0.39281,0.39262,0.39244,0.39225,0.39207,0.39189,0.3917,0.39152,0.39134,0.39115,0.39097,0.39079,0.3906,0.39042,0.39024,0.39006,0.38987,0.38969,0.38951,0.38933,0.38914,0.38896,0.38878,0.3886,0.38841,0.38823,0.38805,0.38787,0.38769,0.3875,0.38732,0.38714,0.38696,0.38678,0.3866,0.38641,0.38623,0.38605,0.38587,0.38569,0.38551,0.38533,0.38515,0.38497,0.38479,0.3846,0.38442,0.38424,0.38406,0.38388,0.3837,0.38352,0.38334,0.38316,0.38298,0.3828,0.38262,0.38244,0.38226,0.38208,0.3819,0.38172,0.38154,0.38136,0.38118,0.381,0.38083,0.38065,0.38047,0.38029,0.38011,0.37993,0.37975,0.37957,0.37939,0.37922,0.37904,0.37886,0.37868,0.3785,0.37832,0.37815,0.37797,0.37779,0.37761,0.37743,0.37726,0.37708,0.3769,0.37672,
-0.37654,0.37637,0.37619,0.37601,0.37583,0.37566,0.37548,0.3753,0.37513,0.37495,0.37477,0.3746,0.37442,0.37424,0.37407,0.37389,0.37371,0.37354,0.37336,0.37318,0.37301,0.37283,0.37266,0.37248,0.3723,0.37213,0.37195,0.37178,0.3716,0.37142,0.37125,0.37107,0.3709,0.37072,0.37055,0.37037,0.3702,0.37002,0.36985,0.36967,0.3695,0.36932,0.36915,0.36897,0.3688,0.36862,0.36845,0.36828,0.3681,0.36793,0.36775,0.36758,0.36741,0.36723,0.36706,0.36688,0.36671,0.36654,0.36636,0.36619,0.36602,0.36584,0.36567,0.3655,0.36532,0.36515,0.36498,0.3648,0.36463,0.36446,0.36428,0.36411,0.36394,0.36377,0.36359,0.36342,0.36325,0.36308,0.36291,0.36273,0.36256,0.36239,0.36222,0.36205,0.36187,0.3617,0.36153,0.36136,0.36119,0.36102,0.36084,0.36067,0.3605,0.36033,0.36016,0.35999,0.35982,0.35965,0.35948,0.3593,
-0.35913,0.35896,0.35879,0.35862,0.35845,0.35828,0.35811,0.35794,0.35777,0.3576,0.35743,0.35726,0.35709,0.35692,0.35675,0.35658,0.35641,0.35624,0.35607,0.3559,0.35573,0.35556,0.35539,0.35523,0.35506,0.35489,0.35472,0.35455,0.35438,0.35421,0.35404,0.35387,0.35371,0.35354,0.35337,0.3532,0.35303,0.35286,0.3527,0.35253,0.35236,0.35219,0.35202,0.35186,0.35169,0.35152,0.35135,0.35119,0.35102,0.35085,0.35068,0.35052,0.35035,0.35018,0.35002,0.34985,0.34968,0.34951,0.34935,0.34918,0.34901,0.34885,0.34868,0.34851,0.34835,0.34818,0.34802,0.34785,0.34768,0.34752,0.34735,0.34719,0.34702,0.34685,0.34669,0.34652,0.34636,0.34619,0.34603,0.34586,0.3457,0.34553,0.34536,0.3452,0.34503,0.34487,0.3447,0.34454,0.34437,0.34421,0.34405,0.34388,0.34372,0.34355,0.34339,0.34322,0.34306,0.34289,0.34273,0.34257,
-0.3424,0.34224,0.34207,0.34191,0.34175,0.34158,0.34142,0.34126,0.34109,0.34093,0.34077,0.3406,0.34044,0.34028,0.34011,0.33995,0.33979,0.33963,0.33946,0.3393,0.33914,0.33897,0.33881,0.33865,0.33849,0.33832,0.33816,0.338,0.33784,0.33768,0.33751,0.33735,0.33719,0.33703,0.33687,0.33671,0.33654,0.33638,0.33622,0.33606,0.3359,0.33574,0.33558,0.33541,0.33525,0.33509,0.33493,0.33477,0.33461,0.33445,0.33429,0.33413,0.33397,0.33381,0.33365,0.33349,0.33333,0.33317,0.33301,0.33285,0.33269,0.33253,0.33237,0.33221,0.33205,0.33189,0.33173,0.33157,0.33141,0.33125,0.33109,0.33093,0.33077,0.33061,0.33045,0.33029,0.33013,0.32998,0.32982,0.32966,0.3295,0.32934,0.32918,0.32902,0.32886,0.32871,0.32855,0.32839,0.32823,0.32807,0.32792,0.32776,0.3276,0.32744,0.32728,0.32713,0.32697,0.32681,0.32665,0.3265,
-0.32634,0.32618,0.32602,0.32587,0.32571,0.32555,0.3254,0.32524,0.32508,0.32493,0.32477,0.32461,0.32446,0.3243,0.32414,0.32399,0.32383,0.32367,0.32352,0.32336,0.3232,0.32305,0.32289,0.32274,0.32258,0.32243,0.32227,0.32211,0.32196,0.3218,0.32165,0.32149,0.32134,0.32118,0.32103,0.32087,0.32072,0.32056,0.32041,0.32025,0.3201,0.31994,0.31979,0.31963,0.31948,0.31932,0.31917,0.31902,0.31886,0.31871,0.31855,0.3184,0.31824,0.31809,0.31794,0.31778,0.31763,0.31748,0.31732,0.31717,0.31701,0.31686,0.31671,0.31655,0.3164,0.31625,0.31609,0.31594,0.31579,0.31564,0.31548,0.31533,0.31518,0.31502,0.31487,0.31472,0.31457,0.31441,0.31426,0.31411,0.31396,0.31381,0.31365,0.3135,0.31335,0.3132,0.31305,0.31289,0.31274,0.31259,0.31244,0.31229,0.31214,0.31199,0.31183,0.31168,0.31153,0.31138,0.31123,0.31108,
-0.31093,0.31078,0.31063,0.31047,0.31032,0.31017,0.31002,0.30987,0.30972,0.30957,0.30942,0.30927,0.30912,0.30897,0.30882,0.30867,0.30852,0.30837,0.30822,0.30807,0.30792,0.30777,0.30762,0.30747,0.30732,0.30717,0.30702,0.30688,0.30673,0.30658,0.30643,0.30628,0.30613,0.30598,0.30583,0.30568,0.30554,0.30539,0.30524,0.30509,0.30494,0.30479,0.30464,0.3045,0.30435,0.3042,0.30405,0.3039,0.30376,0.30361,0.30346,0.30331,0.30317,0.30302,0.30287,0.30272,0.30258,0.30243,0.30228,0.30213,0.30199,0.30184,0.30169,0.30155,0.3014,0.30125,0.30111,0.30096,0.30081,0.30067,0.30052,0.30037,0.30023,0.30008,0.29993,0.29979,0.29964,0.29949,0.29935,0.2992,0.29906,0.29891,0.29877,0.29862,0.29847,0.29833,0.29818,0.29804,0.29789,0.29775,0.2976,0.29746,0.29731,0.29717,0.29702,0.29688,0.29673,0.29659,0.29644,0.2963,
-0.29615,0.29601,0.29586,0.29572,0.29557,0.29543,0.29529,0.29514,0.295,0.29485,0.29471,0.29456,0.29442,0.29428,0.29413,0.29399,0.29385,0.2937,0.29356,0.29341,0.29327,0.29313,0.29298,0.29284,0.2927,0.29256,0.29241,0.29227,0.29213,0.29198,0.29184,0.2917,0.29156,0.29141,0.29127,0.29113,0.29099,0.29084,0.2907,0.29056,0.29042,0.29027,0.29013,0.28999,0.28985,0.28971,0.28956,0.28942,0.28928,0.28914,0.289,0.28886,0.28871,0.28857,0.28843,0.28829,0.28815,0.28801,0.28787,0.28773,0.28758,0.28744,0.2873,0.28716,0.28702,0.28688,0.28674,0.2866,0.28646,0.28632,0.28618,0.28604,0.2859,0.28576,0.28562,0.28548,0.28534,0.2852,0.28506,0.28492,0.28478,0.28464,0.2845,0.28436,0.28422,0.28408,0.28394,0.2838,0.28366,0.28352,0.28338,0.28324,0.28311,0.28297,0.28283,0.28269,0.28255,0.28241,0.28227,0.28213,
-0.282,0.28186,0.28172,0.28158,0.28144,0.2813,0.28117,0.28103,0.28089,0.28075,0.28061,0.28048,0.28034,0.2802,0.28006,0.27992,0.27979,0.27965,0.27951,0.27937,0.27924,0.2791,0.27896,0.27883,0.27869,0.27855,0.27841,0.27828,0.27814,0.278,0.27787,0.27773,0.27759,0.27746,0.27732,0.27718,0.27705,0.27691,0.27678,0.27664,0.2765,0.27637,0.27623,0.27609,0.27596,0.27582,0.27569,0.27555,0.27542,0.27528,0.27514,0.27501,0.27487,0.27474,0.2746,0.27447,0.27433,0.2742,0.27406,0.27393,0.27379,0.27366,0.27352,0.27339,0.27325,0.27312,0.27298,0.27285,0.27271,0.27258,0.27245,0.27231,0.27218,0.27204,0.27191,0.27177,0.27164,0.27151,0.27137,0.27124,0.2711,0.27097,0.27084,0.2707,0.27057,0.27044,0.2703,0.27017,0.27004,0.2699,0.26977,0.26964,0.2695,0.26937,0.26924,0.26911,0.26897,0.26884,0.26871,0.26857};
-
-constexpr double scale_of_stored_gammas_n5 = 1545.88;
-constexpr double scale_of_stored_incomplete_gammas_n5 = 531.27;
-
-constexpr double stored_complete_gamma_values_n5[] = {1.0,1.0,0.99999,0.99998,0.99997,0.99996,0.99994,0.99991,0.99989,0.99986,0.99983,0.99979,0.99975,0.99971,0.99966,0.99961,0.99956,0.9995,0.99944,0.99938,0.99931,0.99924,0.99917,0.99909,0.99901,0.99893,0.99884,0.99875,0.99866,0.99856,0.99846,0.99836,0.99826,0.99815,0.99804,0.99792,0.99781,0.99768,0.99756,0.99743,0.9973,0.99717,0.99704,0.9969,0.99675,0.99661,0.99646,0.99631,0.99616,0.996,0.99584,0.99568,0.99551,0.99534,0.99517,0.995,0.99482,0.99464,0.99446,0.99427,0.99408,0.99389,0.9937,0.9935,0.9933,0.9931,0.99289,0.99269,0.99248,0.99226,0.99205,0.99183,0.99161,0.99138,0.99115,0.99093,0.99069,0.99046,0.99022,0.98998,0.98974,0.98949,0.98925,0.989,0.98874,0.98849,0.98823,0.98797,0.98771,0.98744,0.98717,0.9869,0.98663,0.98635,0.98608,0.9858,0.98551,0.98523,0.98494,0.98465,
-0.98436,0.98406,0.98377,0.98347,0.98317,0.98286,0.98256,0.98225,0.98194,0.98162,0.98131,0.98099,0.98067,0.98035,0.98002,0.97969,0.97936,0.97903,0.9787,0.97836,0.97803,0.97769,0.97734,0.977,0.97665,0.9763,0.97595,0.9756,0.97524,0.97489,0.97453,0.97416,0.9738,0.97343,0.97307,0.9727,0.97232,0.97195,0.97157,0.9712,0.97082,0.97043,0.97005,0.96966,0.96928,0.96889,0.96849,0.9681,0.9677,0.96731,0.96691,0.9665,0.9661,0.9657,0.96529,0.96488,0.96447,0.96405,0.96364,0.96322,0.9628,0.96238,0.96196,0.96154,0.96111,0.96068,0.96026,0.95982,0.95939,0.95896,0.95852,0.95808,0.95764,0.9572,0.95676,0.95631,0.95586,0.95542,0.95497,0.95451,0.95406,0.9536,0.95315,0.95269,0.95223,0.95177,0.9513,0.95084,0.95037,0.9499,0.94943,0.94896,0.94849,0.94801,0.94754,0.94706,0.94658,0.9461,0.94562,0.94513,
-0.94465,0.94416,0.94367,0.94318,0.94269,0.9422,0.9417,0.94121,0.94071,0.94021,0.93971,0.93921,0.9387,0.9382,0.93769,0.93719,0.93668,0.93617,0.93566,0.93514,0.93463,0.93411,0.9336,0.93308,0.93256,0.93204,0.93151,0.93099,0.93046,0.92994,0.92941,0.92888,0.92835,0.92782,0.92728,0.92675,0.92621,0.92568,0.92514,0.9246,0.92406,0.92352,0.92297,0.92243,0.92188,0.92134,0.92079,0.92024,0.91969,0.91914,0.91859,0.91803,0.91748,0.91692,0.91636,0.91581,0.91525,0.91469,0.91412,0.91356,0.913,0.91243,0.91186,0.9113,0.91073,0.91016,0.90959,0.90902,0.90844,0.90787,0.9073,0.90672,0.90614,0.90556,0.90498,0.9044,0.90382,0.90324,0.90266,0.90207,0.90149,0.9009,0.90032,0.89973,0.89914,0.89855,0.89796,0.89737,0.89677,0.89618,0.89558,0.89499,0.89439,0.89379,0.8932,0.8926,0.892,0.89139,0.89079,0.89019,
-0.88959,0.88898,0.88838,0.88777,0.88716,0.88655,0.88594,0.88533,0.88472,0.88411,0.8835,0.88289,0.88227,0.88166,0.88104,0.88043,0.87981,0.87919,0.87857,0.87795,0.87733,0.87671,0.87609,0.87547,0.87484,0.87422,0.87359,0.87297,0.87234,0.87171,0.87109,0.87046,0.86983,0.8692,0.86857,0.86794,0.8673,0.86667,0.86604,0.8654,0.86477,0.86413,0.8635,0.86286,0.86222,0.86158,0.86095,0.86031,0.85967,0.85903,0.85838,0.85774,0.8571,0.85646,0.85581,0.85517,0.85452,0.85388,0.85323,0.85258,0.85194,0.85129,0.85064,0.84999,0.84934,0.84869,0.84804,0.84739,0.84674,0.84608,0.84543,0.84478,0.84412,0.84347,0.84281,0.84216,0.8415,0.84085,0.84019,0.83953,0.83887,0.83821,0.83755,0.8369,0.83623,0.83557,0.83491,0.83425,0.83359,0.83293,0.83226,0.8316,0.83094,0.83027,0.82961,0.82894,0.82828,0.82761,0.82694,0.82628,
-0.82561,0.82494,0.82427,0.82361,0.82294,0.82227,0.8216,0.82093,0.82026,0.81959,0.81892,0.81824,0.81757,0.8169,0.81623,0.81555,0.81488,0.81421,0.81353,0.81286,0.81218,0.81151,0.81083,0.81016,0.80948,0.8088,0.80813,0.80745,0.80677,0.8061,0.80542,0.80474,0.80406,0.80338,0.8027,0.80202,0.80134,0.80066,0.79998,0.7993,0.79862,0.79794,0.79726,0.79658,0.7959,0.79521,0.79453,0.79385,0.79317,0.79248,0.7918,0.79112,0.79043,0.78975,0.78906,0.78838,0.78769,0.78701,0.78632,0.78564,0.78495,0.78427,0.78358,0.7829,0.78221,0.78152,0.78084,0.78015,0.77946,0.77878,0.77809,0.7774,0.77671,0.77602,0.77534,0.77465,0.77396,0.77327,0.77258,0.77189,0.77121,0.77052,0.76983,0.76914,0.76845,0.76776,0.76707,0.76638,0.76569,0.765,0.76431,0.76362,0.76293,0.76224,0.76155,0.76086,0.76017,0.75947,0.75878,0.75809,
-0.7574,0.75671,0.75602,0.75533,0.75464,0.75394,0.75325,0.75256,0.75187,0.75118,0.75049,0.74979,0.7491,0.74841,0.74772,0.74703,0.74633,0.74564,0.74495,0.74426,0.74356,0.74287,0.74218,0.74149,0.7408,0.7401,0.73941,0.73872,0.73803,0.73733,0.73664,0.73595,0.73526,0.73456,0.73387,0.73318,0.73249,0.73179,0.7311,0.73041,0.72972,0.72902,0.72833,0.72764,0.72695,0.72625,0.72556,0.72487,0.72418,0.72349,0.72279,0.7221,0.72141,0.72072,0.72003,0.71933,0.71864,0.71795,0.71726,0.71657,0.71588,0.71519,0.71449,0.7138,0.71311,0.71242,0.71173,0.71104,0.71035,0.70966,0.70897,0.70827,0.70758,0.70689,0.7062,0.70551,0.70482,0.70413,0.70344,0.70275,0.70206,0.70137,0.70068,0.69999,0.69931,0.69862,0.69793,0.69724,0.69655,0.69586,0.69517,0.69448,0.6938,0.69311,0.69242,0.69173,0.69104,0.69036,0.68967,0.68898,
-0.68829,0.68761,0.68692,0.68623,0.68555,0.68486,0.68417,0.68349,0.6828,0.68212,0.68143,0.68074,0.68006,0.67937,0.67869,0.678,0.67732,0.67664,0.67595,0.67527,0.67458,0.6739,0.67322,0.67253,0.67185,0.67117,0.67048,0.6698,0.66912,0.66844,0.66775,0.66707,0.66639,0.66571,0.66503,0.66435,0.66367,0.66299,0.66231,0.66163,0.66095,0.66027,0.65959,0.65891,0.65823,0.65755,0.65687,0.65619,0.65551,0.65484,0.65416,0.65348,0.6528,0.65213,0.65145,0.65077,0.6501,0.64942,0.64875,0.64807,0.6474,0.64672,0.64605,0.64537,0.6447,0.64402,0.64335,0.64268,0.642,0.64133,0.64066,0.63998,0.63931,0.63864,0.63797,0.6373,0.63663,0.63596,0.63529,0.63462,0.63394,0.63328,0.63261,0.63194,0.63127,0.6306,0.62993,0.62926,0.62859,0.62793,0.62726,0.62659,0.62593,0.62526,0.62459,0.62393,0.62326,0.6226,0.62193,0.62127,
-0.6206,0.61994,0.61928,0.61861,0.61795,0.61729,0.61662,0.61596,0.6153,0.61464,0.61398,0.61332,0.61266,0.612,0.61134,0.61068,0.61002,0.60936,0.6087,0.60804,0.60738,0.60673,0.60607,0.60541,0.60476,0.6041,0.60344,0.60279,0.60213,0.60148,0.60082,0.60017,0.59951,0.59886,0.59821,0.59755,0.5969,0.59625,0.5956,0.59494,0.59429,0.59364,0.59299,0.59234,0.59169,0.59104,0.59039,0.58974,0.5891,0.58845,0.5878,0.58715,0.5865,0.58586,0.58521,0.58457,0.58392,0.58327,0.58263,0.58199,0.58134,0.5807,0.58005,0.57941,0.57877,0.57813,0.57748,0.57684,0.5762,0.57556,0.57492,0.57428,0.57364,0.573,0.57236,0.57172,0.57108,0.57045,0.56981,0.56917,0.56853,0.5679,0.56726,0.56663,0.56599,0.56536,0.56472,0.56409,0.56345,0.56282,0.56219,0.56156,0.56092,0.56029,0.55966,0.55903,0.5584,0.55777,0.55714,0.55651,
-0.55588,0.55525,0.55462,0.554,0.55337,0.55274,0.55212,0.55149,0.55086,0.55024,0.54961,0.54899,0.54836,0.54774,0.54712,0.54649,0.54587,0.54525,0.54463,0.54401,0.54339,0.54277,0.54215,0.54153,0.54091,0.54029,0.53967,0.53905,0.53843,0.53782,0.5372,0.53658,0.53597,0.53535,0.53474,0.53412,0.53351,0.53289,0.53228,0.53167,0.53106,0.53044,0.52983,0.52922,0.52861,0.528,0.52739,0.52678,0.52617,0.52556,0.52495,0.52435,0.52374,0.52313,0.52252,0.52192,0.52131,0.52071,0.5201,0.5195,0.51889,0.51829,0.51769,0.51709,0.51648,0.51588,0.51528,0.51468,0.51408,0.51348,0.51288,0.51228,0.51168,0.51108,0.51049,0.50989,0.50929,0.50869,0.5081,0.5075,0.50691,0.50631,0.50572,0.50512,0.50453,0.50394,0.50335,0.50275,0.50216,0.50157,0.50098,0.50039,0.4998,0.49921,0.49862,0.49803,0.49745,0.49686,0.49627,0.49568,
-0.4951,0.49451,0.49393,0.49334,0.49276,0.49217,0.49159,0.49101,0.49042,0.48984,0.48926,0.48868,0.4881,0.48752,0.48694,0.48636,0.48578,0.4852,0.48462,0.48405,0.48347,0.48289,0.48232,0.48174,0.48116,0.48059,0.48002,0.47944,0.47887,0.4783,0.47772,0.47715,0.47658,0.47601,0.47544,0.47487,0.4743,0.47373,0.47316,0.47259,0.47202,0.47146,0.47089,0.47032,0.46976,0.46919,0.46863,0.46806,0.4675,0.46693,0.46637,0.46581,0.46524,0.46468,0.46412,0.46356,0.463,0.46244,0.46188,0.46132,0.46076,0.46021,0.45965,0.45909,0.45853,0.45798,0.45742,0.45687,0.45631,0.45576,0.4552,0.45465,0.4541,0.45355,0.45299,0.45244,0.45189,0.45134,0.45079,0.45024,0.44969,0.44914,0.44859,0.44805,0.4475,0.44695,0.44641,0.44586,0.44532,0.44477,0.44423,0.44368,0.44314,0.4426,0.44205,0.44151,0.44097,0.44043,0.43989,0.43935,
-0.43881,0.43827,0.43773,0.43719,0.43665,0.43612,0.43558,0.43504,0.43451,0.43397,0.43344,0.4329,0.43237,0.43184,0.4313,0.43077,0.43024,0.42971,0.42918,0.42865,0.42812,0.42759,0.42706,0.42653,0.426,0.42547,0.42495,0.42442,0.42389,0.42337,0.42284,0.42232,0.42179,0.42127,0.42074,0.42022,0.4197,0.41918,0.41866,0.41814,0.41761,0.41709,0.41657,0.41606,0.41554,0.41502,0.4145,0.41398,0.41347,0.41295,0.41244,0.41192,0.41141,0.41089,0.41038,0.40986,0.40935,0.40884,0.40833,0.40782,0.4073,0.40679,0.40628,0.40577,0.40527,0.40476,0.40425,0.40374,0.40323,0.40273,0.40222,0.40172,0.40121,0.40071,0.4002,0.3997,0.3992,0.39869,0.39819,0.39769,0.39719,0.39669,0.39619,0.39569,0.39519,0.39469,0.39419,0.39369,0.39319,0.3927,0.3922,0.3917,0.39121,0.39071,0.39022,0.38972,0.38923,0.38874,0.38824,0.38775,
-0.38726,0.38677,0.38628,0.38579,0.3853,0.38481,0.38432,0.38383,0.38334,0.38286,0.38237,0.38188,0.3814,0.38091,0.38043,0.37994,0.37946,0.37897,0.37849,0.37801,0.37753,0.37704,0.37656,0.37608,0.3756,0.37512,0.37464,0.37416,0.37368,0.37321,0.37273,0.37225,0.37178,0.3713,0.37082,0.37035,0.36987,0.3694,0.36893,0.36845,0.36798,0.36751,0.36704,0.36657,0.36609,0.36562,0.36515,0.36469,0.36422,0.36375,0.36328,0.36281,0.36235,0.36188,0.36141,0.36095,0.36048,0.36002,0.35955,0.35909,0.35863,0.35816,0.3577,0.35724,0.35678,0.35632,0.35586,0.3554,0.35494,0.35448,0.35402,0.35356,0.3531,0.35265,0.35219,0.35173,0.35128,0.35082,0.35037,0.34991,0.34946,0.34901,0.34855,0.3481,0.34765,0.3472,0.34675,0.3463,0.34585,0.3454,0.34495,0.3445,0.34405,0.3436,0.34316,0.34271,0.34226,0.34182,0.34137,0.34093,
-0.34048,0.34004,0.3396,0.33915,0.33871,0.33827,0.33783,0.33739,0.33694,0.3365,0.33606,0.33563,0.33519,0.33475,0.33431,0.33387,0.33344,0.333,0.33256,0.33213,0.33169,0.33126,0.33082,0.33039,0.32996,0.32952,0.32909,0.32866,0.32823,0.3278,0.32737,0.32694,0.32651,0.32608,0.32565,0.32522,0.32479,0.32436,0.32394,0.32351,0.32309,0.32266,0.32224,0.32181,0.32139,0.32096,0.32054,0.32012,0.31969,0.31927,0.31885,0.31843,0.31801,0.31759,0.31717,0.31675,0.31633,0.31591,0.3155,0.31508,0.31466,0.31425,0.31383,0.31341,0.313,0.31258,0.31217,0.31176,0.31134,0.31093,0.31052,0.31011,0.3097,0.30928,0.30887,0.30846,0.30805,0.30765,0.30724,0.30683,0.30642,0.30601,0.30561,0.3052,0.30479,0.30439,0.30398,0.30358,0.30317,0.30277,0.30237,0.30196,0.30156,0.30116,0.30076,0.30036,0.29996,0.29956,0.29916,0.29876,
-0.29836,0.29796,0.29756,0.29716,0.29677,0.29637,0.29597,0.29558,0.29518,0.29479,0.29439,0.294,0.29361,0.29321,0.29282,0.29243,0.29204,0.29164,0.29125,0.29086,0.29047,0.29008,0.28969,0.28931,0.28892,0.28853,0.28814,0.28775,0.28737,0.28698,0.2866,0.28621,0.28583,0.28544,0.28506,0.28467,0.28429,0.28391,0.28353,0.28314,0.28276,0.28238,0.282,0.28162,0.28124,0.28086,0.28048,0.28011,0.27973,0.27935,0.27897,0.2786,0.27822,0.27785,0.27747,0.2771,0.27672,0.27635,0.27597,0.2756,0.27523,0.27485,0.27448,0.27411,0.27374,0.27337,0.273,0.27263,0.27226,0.27189,0.27152,0.27116,0.27079,0.27042,0.27005,0.26969,0.26932,0.26896,0.26859,0.26823,0.26786,0.2675,0.26713,0.26677,0.26641,0.26605,0.26568,0.26532,0.26496,0.2646,0.26424,0.26388,0.26352,0.26316,0.26281,0.26245,0.26209,0.26173,0.26138,0.26102,
-0.26066,0.26031,0.25995,0.2596,0.25925,0.25889,0.25854,0.25819,0.25783,0.25748,0.25713,0.25678,0.25643,0.25608,0.25573,0.25538,0.25503,0.25468,0.25433,0.25398,0.25363,0.25329,0.25294,0.25259,0.25225,0.2519,0.25156,0.25121,0.25087,0.25052,0.25018,0.24984,0.24949,0.24915,0.24881,0.24847,0.24813,0.24779,0.24745,0.24711,0.24677,0.24643,0.24609,0.24575,0.24541,0.24508,0.24474,0.2444,0.24407,0.24373,0.24339,0.24306,0.24272,0.24239,0.24206,0.24172,0.24139,0.24106,0.24072,0.24039,0.24006,0.23973,0.2394,0.23907,0.23874,0.23841,0.23808,0.23775,0.23742,0.2371,0.23677,0.23644,0.23611,0.23579,0.23546,0.23514,0.23481,0.23449,0.23416,0.23384,0.23351,0.23319,0.23287,0.23255,0.23222,0.2319,0.23158,0.23126,0.23094,0.23062,0.2303,0.22998,0.22966,0.22934,0.22902,0.22871,0.22839,0.22807,0.22775,0.22744,
-0.22712,0.22681,0.22649,0.22618,0.22586,0.22555,0.22524,0.22492,0.22461,0.2243,0.22399,0.22367,0.22336,0.22305,0.22274,0.22243,0.22212,0.22181,0.2215,0.22119,0.22089,0.22058,0.22027,0.21996,0.21966,0.21935,0.21904,0.21874,0.21843,0.21813,0.21782,0.21752,0.21722,0.21691,0.21661,0.21631,0.216,0.2157,0.2154,0.2151,0.2148,0.2145,0.2142,0.2139,0.2136,0.2133,0.213,0.2127,0.21241,0.21211,0.21181,0.21151,0.21122,0.21092,0.21063,0.21033,0.21004,0.20974,0.20945,0.20915,0.20886,0.20857,0.20827,0.20798,0.20769,0.2074,0.20711,0.20682,0.20653,0.20624,0.20595,0.20566,0.20537,0.20508,0.20479,0.2045,0.20421,0.20393,0.20364,0.20335,0.20307,0.20278,0.2025,0.20221,0.20193,0.20164,0.20136,0.20107,0.20079,0.20051,0.20022,0.19994,0.19966,0.19938,0.1991,0.19882,0.19854,0.19826,0.19798,0.1977,
-0.19742,0.19714,0.19686,0.19658,0.1963,0.19603,0.19575,0.19547,0.1952,0.19492,0.19465,0.19437,0.1941,0.19382,0.19355,0.19327,0.193,0.19273,0.19245,0.19218,0.19191,0.19164,0.19136,0.19109,0.19082,0.19055,0.19028,0.19001,0.18974,0.18947,0.1892,0.18894,0.18867,0.1884,0.18813,0.18787,0.1876,0.18733,0.18707,0.1868,0.18654,0.18627,0.18601,0.18574,0.18548,0.18521,0.18495,0.18469,0.18443,0.18416,0.1839,0.18364,0.18338,0.18312,0.18286,0.1826,0.18234,0.18208,0.18182,0.18156,0.1813,0.18104,0.18078,0.18052,0.18027,0.18001,0.17975,0.1795,0.17924,0.17899,0.17873,0.17848,0.17822,0.17797,0.17771,0.17746,0.1772,0.17695,0.1767,0.17645,0.17619,0.17594,0.17569,0.17544,0.17519,0.17494,0.17469,0.17444,0.17419,0.17394,0.17369,0.17344,0.17319,0.17295,0.1727,0.17245,0.1722,0.17196,0.17171,0.17147,
-0.17122,0.17097,0.17073,0.17048,0.17024,0.17,0.16975,0.16951,0.16927,0.16902,0.16878,0.16854,0.1683,0.16805,0.16781,0.16757,0.16733,0.16709,0.16685,0.16661,0.16637,0.16613,0.16589,0.16566,0.16542,0.16518,0.16494,0.1647,0.16447,0.16423,0.16399,0.16376,0.16352,0.16329,0.16305,0.16282,0.16258,0.16235,0.16212,0.16188,0.16165,0.16142,0.16118,0.16095,0.16072,0.16049,0.16026,0.16002,0.15979,0.15956,0.15933,0.1591,0.15887,0.15864,0.15841,0.15819,0.15796,0.15773,0.1575,0.15727,0.15705,0.15682,0.15659,0.15637,0.15614,0.15592,0.15569,0.15546,0.15524,0.15502,0.15479,0.15457,0.15434,0.15412,0.1539,0.15367,0.15345,0.15323,0.15301,0.15279,0.15257,0.15234,0.15212,0.1519,0.15168,0.15146,0.15124,0.15102,0.15081,0.15059,0.15037,0.15015,0.14993,0.14971,0.1495,0.14928,0.14906,0.14885,0.14863,0.14842,
-0.1482,0.14798,0.14777,0.14755,0.14734,0.14713,0.14691,0.1467,0.14649,0.14627,0.14606,0.14585,0.14564,0.14542,0.14521,0.145,0.14479,0.14458,0.14437,0.14416,0.14395,0.14374,0.14353,0.14332,0.14311,0.1429,0.1427,0.14249,0.14228,0.14207,0.14187,0.14166,0.14145,0.14125,0.14104,0.14083,0.14063,0.14042,0.14022,0.14001,0.13981,0.13961,0.1394,0.1392,0.139,0.13879,0.13859,0.13839,0.13818,0.13798,0.13778,0.13758,0.13738,0.13718,0.13698,0.13678,0.13658,0.13638,0.13618,0.13598,0.13578,0.13558,0.13538,0.13518,0.13499,0.13479,0.13459,0.1344,0.1342,0.134,0.13381,0.13361,0.13341,0.13322,0.13302,0.13283,0.13263,0.13244,0.13224,0.13205,0.13186,0.13166,0.13147,0.13128,0.13109,0.13089,0.1307,0.13051,0.13032,0.13013,0.12994,0.12974,0.12955,0.12936,0.12917,0.12898,0.12879,0.12861,0.12842,0.12823,
-0.12804,0.12785,0.12766,0.12748,0.12729,0.1271,0.12691,0.12673,0.12654,0.12635,0.12617,0.12598,0.1258,0.12561,0.12543,0.12524,0.12506,0.12487,0.12469,0.12451,0.12432,0.12414,0.12396,0.12377,0.12359,0.12341,0.12323,0.12305,0.12286,0.12268,0.1225,0.12232,0.12214,0.12196,0.12178,0.1216,0.12142,0.12124,0.12106,0.12089,0.12071,0.12053,0.12035,0.12017,0.12,0.11982,0.11964,0.11946,0.11929,0.11911,0.11894,0.11876,0.11858,0.11841,0.11823,0.11806,0.11788,0.11771,0.11754,0.11736,0.11719,0.11701,0.11684,0.11667,0.1165,0.11632,0.11615,0.11598,0.11581,0.11564,0.11546,0.11529,0.11512,0.11495,0.11478,0.11461,0.11444,0.11427,0.1141,0.11393,0.11376,0.1136,0.11343,0.11326,0.11309,0.11292,0.11276,0.11259,0.11242,0.11225,0.11209,0.11192,0.11175,0.11159,0.11142,0.11126,0.11109,0.11093,0.11076,0.1106,
-0.11043,0.11027,0.11011,0.10994,0.10978,0.10962,0.10945,0.10929,0.10913,0.10896,0.1088,0.10864,0.10848,0.10832,0.10816,0.108,0.10783,0.10767,0.10751,0.10735,0.10719,0.10703,0.10687,0.10672,0.10656,0.1064,0.10624,0.10608,0.10592,0.10577,0.10561,0.10545,0.10529,0.10514,0.10498,0.10482,0.10467,0.10451,0.10435,0.1042,0.10404,0.10389,0.10373,0.10358,0.10342,0.10327,0.10311,0.10296,0.10281,0.10265,0.1025,0.10235,0.10219,0.10204,0.10189,0.10174,0.10158,0.10143,0.10128,0.10113,0.10098,0.10083,0.10068,0.10053,0.10038,0.10023,0.10008,0.09993,0.09978,0.09963,0.09948,0.09933,0.09918,0.09903,0.09888,0.09874,0.09859,0.09844,0.09829,0.09815,0.098,0.09785,0.0977,0.09756,0.09741,0.09727,0.09712,0.09697,0.09683,0.09668,0.09654,0.09639,0.09625,0.09611,0.09596,0.09582,0.09567,0.09553,0.09539,0.09524,
-0.0951,0.09496,0.09482,0.09467,0.09453,0.09439,0.09425,0.09411,0.09396,0.09382,0.09368,0.09354,0.0934,0.09326,0.09312,0.09298,0.09284,0.0927,0.09256,0.09242,0.09228,0.09214,0.09201,0.09187,0.09173,0.09159,0.09145,0.09132,0.09118,0.09104,0.0909,0.09077,0.09063,0.09049,0.09036,0.09022,0.09009,0.08995,0.08981,0.08968,0.08954,0.08941,0.08927,0.08914,0.08901,0.08887,0.08874,0.0886,0.08847,0.08834,0.0882,0.08807,0.08794,0.0878,0.08767,0.08754,0.08741,0.08728,0.08714,0.08701,0.08688,0.08675,0.08662,0.08649,0.08636,0.08623,0.0861,0.08597,0.08584,0.08571,0.08558,0.08545,0.08532,0.08519,0.08506,0.08493,0.08481,0.08468,0.08455,0.08442,0.08429,0.08417,0.08404,0.08391,0.08378,0.08366,0.08353,0.0834,0.08328,0.08315,0.08303,0.0829,0.08278,0.08265,0.08252,0.0824,0.08228,0.08215,0.08203,0.0819,
-0.08178,0.08165,0.08153,0.08141,0.08128,0.08116,0.08104,0.08091,0.08079,0.08067,0.08055,0.08042,0.0803,0.08018,0.08006,0.07994,0.07982,0.0797,0.07957,0.07945,0.07933,0.07921,0.07909,0.07897,0.07885,0.07873,0.07861,0.07849,0.07837,0.07826,0.07814,0.07802,0.0779,0.07778,0.07766,0.07755,0.07743,0.07731,0.07719,0.07707,0.07696,0.07684,0.07672,0.07661,0.07649,0.07637,0.07626,0.07614,0.07603,0.07591,0.0758,0.07568,0.07556,0.07545,0.07533,0.07522,0.07511,0.07499,0.07488,0.07476,0.07465,0.07454,0.07442,0.07431,0.0742,0.07408,0.07397,0.07386,0.07374,0.07363,0.07352,0.07341,0.0733,0.07318,0.07307,0.07296,0.07285,0.07274,0.07263,0.07252,0.07241,0.0723,0.07219,0.07208,0.07197,0.07186,0.07175,0.07164,0.07153,0.07142,0.07131,0.0712,0.07109,0.07098,0.07087,0.07077,0.07066,0.07055,0.07044,0.07033,
-0.07023,0.07012,0.07001,0.06991,0.0698,0.06969,0.06959,0.06948,0.06937,0.06927,0.06916,0.06906,0.06895,0.06884,0.06874,0.06863,0.06853,0.06842,0.06832,0.06821,0.06811,0.06801,0.0679,0.0678,0.06769,0.06759,0.06749,0.06738,0.06728,0.06718,0.06707,0.06697,0.06687,0.06677,0.06666,0.06656,0.06646,0.06636,0.06626,0.06616,0.06605,0.06595,0.06585,0.06575,0.06565,0.06555,0.06545,0.06535,0.06525,0.06515,0.06505,0.06495,0.06485,0.06475,0.06465,0.06455,0.06445,0.06435,0.06425,0.06416,0.06406,0.06396,0.06386,0.06376,0.06366,0.06357,0.06347,0.06337,0.06327,0.06318,0.06308,0.06298,0.06289,0.06279,0.06269,0.0626,0.0625,0.0624,0.06231,0.06221,0.06212,0.06202,0.06193,0.06183,0.06174,0.06164,0.06155,0.06145,0.06136,0.06126,0.06117,0.06107,0.06098,0.06089,0.06079,0.0607,0.06061,0.06051,0.06042,0.06033,
-0.06023,0.06014,0.06005,0.05996,0.05986,0.05977,0.05968,0.05959,0.05949,0.0594,0.05931,0.05922,0.05913,0.05904,0.05895,0.05886,0.05877,0.05867,0.05858,0.05849,0.0584,0.05831,0.05822,0.05813,0.05804,0.05795,0.05787,0.05778,0.05769,0.0576,0.05751,0.05742,0.05733,0.05724,0.05715,0.05707,0.05698,0.05689,0.0568,0.05671,0.05663,0.05654,0.05645,0.05636,0.05628,0.05619,0.0561,0.05602,0.05593,0.05584,0.05576,0.05567,0.05559,0.0555,0.05541,0.05533,0.05524,0.05516,0.05507,0.05499,0.0549,0.05482,0.05473,0.05465,0.05456,0.05448,0.05439,0.05431,0.05423,0.05414,0.05406,0.05397,0.05389,0.05381,0.05372,0.05364,0.05356,0.05348,0.05339,0.05331,0.05323,0.05314,0.05306,0.05298,0.0529,0.05282,0.05273,0.05265,0.05257,0.05249,0.05241,0.05233,0.05225,0.05216,0.05208,0.052,0.05192,0.05184,0.05176,0.05168,
-0.0516,0.05152,0.05144,0.05136,0.05128,0.0512,0.05112,0.05104,0.05096,0.05088,0.05081,0.05073,0.05065,0.05057,0.05049,0.05041,0.05033,0.05026,0.05018,0.0501,0.05002,0.04994,0.04987,0.04979,0.04971,0.04964,0.04956,0.04948,0.0494,0.04933,0.04925,0.04917,0.0491,0.04902,0.04894,0.04887,0.04879,0.04872,0.04864,0.04857,0.04849,0.04841,0.04834,0.04826,0.04819,0.04811,0.04804,0.04796,0.04789,0.04782,0.04774,0.04767,0.04759,0.04752,0.04744,0.04737,0.0473,0.04722,0.04715,0.04708,0.047,0.04693,0.04686,0.04678,0.04671,0.04664,0.04657,0.04649,0.04642,0.04635,0.04628,0.0462,0.04613,0.04606,0.04599,0.04592,0.04584,0.04577,0.0457,0.04563,0.04556,0.04549,0.04542,0.04535,0.04528,0.0452,0.04513,0.04506,0.04499,0.04492,0.04485,0.04478,0.04471,0.04464,0.04457,0.0445,0.04443,0.04437,0.0443,0.04423,
-0.04416,0.04409,0.04402,0.04395,0.04388,0.04381,0.04375,0.04368,0.04361,0.04354,0.04347,0.0434,0.04334,0.04327,0.0432,0.04313,0.04307,0.043,0.04293,0.04287,0.0428,0.04273,0.04266,0.0426,0.04253,0.04246,0.0424,0.04233,0.04227,0.0422,0.04213,0.04207,0.042,0.04194,0.04187,0.0418,0.04174,0.04167,0.04161,0.04154,0.04148,0.04141,0.04135,0.04128,0.04122,0.04115,0.04109,0.04103,0.04096,0.0409,0.04083,0.04077,0.04071,0.04064,0.04058,0.04051,0.04045,0.04039,0.04032,0.04026,0.0402,0.04013,0.04007,0.04001,0.03995,0.03988,0.03982,0.03976,0.0397,0.03963,0.03957,0.03951,0.03945,0.03939,0.03932,0.03926,0.0392,0.03914,0.03908,0.03902,0.03896,0.03889,0.03883,0.03877,0.03871,0.03865,0.03859,0.03853,0.03847,0.03841,0.03835,0.03829,0.03823,0.03817,0.03811,0.03805,0.03799,0.03793,0.03787,0.03781,
-0.03775,0.03769,0.03763,0.03757,0.03751,0.03745,0.0374,0.03734,0.03728,0.03722,0.03716,0.0371,0.03704,0.03699,0.03693,0.03687,0.03681,0.03675,0.0367,0.03664,0.03658,0.03652,0.03647,0.03641,0.03635,0.03629,0.03624,0.03618,0.03612,0.03607,0.03601,0.03595,0.0359,0.03584,0.03578,0.03573,0.03567,0.03561,0.03556,0.0355,0.03545,0.03539,0.03533,0.03528,0.03522,0.03517,0.03511,0.03506,0.035,0.03495,0.03489,0.03484,0.03478,0.03473,0.03467,0.03462,0.03456,0.03451,0.03445,0.0344,0.03434,0.03429,0.03424,0.03418,0.03413,0.03407,0.03402,0.03397,0.03391,0.03386,0.03381,0.03375,0.0337,0.03365,0.03359,0.03354,0.03349,0.03343,0.03338,0.03333,0.03328,0.03322,0.03317,0.03312,0.03307,0.03301,0.03296,0.03291,0.03286,0.03281,0.03275,0.0327,0.03265,0.0326,0.03255,0.0325,0.03245,0.03239,0.03234,0.03229,
-0.03224,0.03219,0.03214,0.03209,0.03204,0.03199,0.03194,0.03189,0.03184,0.03178,0.03173,0.03168,0.03163,0.03158,0.03153,0.03148,0.03143,0.03138,0.03134,0.03129,0.03124,0.03119,0.03114,0.03109,0.03104,0.03099,0.03094,0.03089,0.03084,0.03079,0.03075,0.0307,0.03065,0.0306,0.03055,0.0305,0.03045,0.03041,0.03036,0.03031,0.03026,0.03021,0.03017,0.03012,0.03007,0.03002,0.02997,0.02993,0.02988,0.02983,0.02979,0.02974,0.02969,0.02964,0.0296,0.02955,0.0295,0.02946,0.02941,0.02936,0.02932,0.02927,0.02922,0.02918,0.02913,0.02908,0.02904,0.02899,0.02895,0.0289,0.02885,0.02881,0.02876,0.02872,0.02867,0.02863,0.02858,0.02854,0.02849,0.02844,0.0284,0.02835,0.02831,0.02826,0.02822,0.02817,0.02813,0.02809,0.02804,0.028,0.02795,0.02791,0.02786,0.02782,0.02777,0.02773,0.02769,0.02764,0.0276,0.02755,
-0.02751,0.02747,0.02742,0.02738,0.02734,0.02729,0.02725,0.02721,0.02716,0.02712,0.02708,0.02703,0.02699,0.02695,0.0269,0.02686,0.02682,0.02678,0.02673,0.02669,0.02665,0.02661,0.02656,0.02652,0.02648,0.02644,0.0264,0.02635,0.02631,0.02627,0.02623,0.02619,0.02614,0.0261,0.02606,0.02602,0.02598,0.02594,0.0259,0.02585,0.02581,0.02577,0.02573,0.02569,0.02565,0.02561,0.02557,0.02553,0.02549,0.02545,0.0254,0.02536,0.02532,0.02528,0.02524,0.0252,0.02516,0.02512,0.02508,0.02504,0.025,0.02496,0.02492,0.02488,0.02484,0.0248,0.02476,0.02472,0.02469,0.02465,0.02461,0.02457,0.02453,0.02449,0.02445,0.02441,0.02437,0.02433,0.02429,0.02426,0.02422,0.02418,0.02414,0.0241,0.02406,0.02402,0.02399,0.02395,0.02391,0.02387,0.02383,0.02379,0.02376,0.02372,0.02368,0.02364,0.02361,0.02357,0.02353,0.02349,
-0.02345,0.02342,0.02338,0.02334,0.02331,0.02327,0.02323,0.02319,0.02316,0.02312,0.02308,0.02305,0.02301,0.02297,0.02294,0.0229,0.02286,0.02283,0.02279,0.02275,0.02272,0.02268,0.02264,0.02261,0.02257,0.02254,0.0225,0.02246,0.02243,0.02239,0.02236,0.02232,0.02228,0.02225,0.02221,0.02218,0.02214,0.02211,0.02207,0.02204,0.022,0.02197,0.02193,0.02189,0.02186,0.02182,0.02179,0.02175,0.02172,0.02169,0.02165,0.02162,0.02158,0.02155,0.02151,0.02148,0.02144,0.02141,0.02137,0.02134,0.02131,0.02127,0.02124,0.0212,0.02117,0.02114,0.0211,0.02107,0.02103,0.021,0.02097,0.02093,0.0209,0.02087,0.02083,0.0208,0.02077,0.02073,0.0207,0.02067,0.02063,0.0206,0.02057,0.02053,0.0205,0.02047,0.02044,0.0204,0.02037,0.02034,0.0203,0.02027,0.02024,0.02021,0.02017,0.02014,0.02011,0.02008,0.02005,0.02001,
-0.01998,0.01995,0.01992,0.01988,0.01985,0.01982,0.01979,0.01976,0.01973,0.01969,0.01966,0.01963,0.0196,0.01957,0.01954,0.0195,0.01947,0.01944,0.01941,0.01938,0.01935,0.01932,0.01929,0.01926,0.01922,0.01919,0.01916,0.01913,0.0191,0.01907,0.01904,0.01901,0.01898,0.01895,0.01892,0.01889,0.01886,0.01883,0.0188,0.01877,0.01874,0.01871,0.01868,0.01865,0.01862,0.01859,0.01856,0.01853,0.0185,0.01847,0.01844,0.01841,0.01838,0.01835,0.01832,0.01829,0.01826,0.01823,0.0182,0.01817,0.01814,0.01811,0.01808,0.01805,0.01803,0.018,0.01797,0.01794,0.01791,0.01788,0.01785,0.01782,0.01779,0.01777,0.01774,0.01771,0.01768,0.01765,0.01762,0.01759,0.01757,0.01754,0.01751,0.01748,0.01745,0.01743,0.0174,0.01737,0.01734,0.01731,0.01729,0.01726,0.01723,0.0172,0.01717,0.01715,0.01712,0.01709,0.01706,0.01704,
-0.01701,0.01698,0.01695,0.01693,0.0169,0.01687,0.01684,0.01682,0.01679,0.01676,0.01674,0.01671,0.01668,0.01666,0.01663,0.0166,0.01657,0.01655,0.01652,0.01649,0.01647,0.01644,0.01641,0.01639,0.01636,0.01634,0.01631,0.01628,0.01626,0.01623,0.0162,0.01618,0.01615,0.01613,0.0161,0.01607,0.01605,0.01602,0.016,0.01597,0.01594,0.01592,0.01589,0.01587,0.01584,0.01582,0.01579,0.01576,0.01574,0.01571,0.01569,0.01566,0.01564,0.01561,0.01559,0.01556,0.01554,0.01551,0.01549,0.01546,0.01544,0.01541,0.01539,0.01536,0.01534,0.01531,0.01529,0.01526,0.01524,0.01521,0.01519,0.01516,0.01514,0.01511,0.01509,0.01507,0.01504,0.01502,0.01499,0.01497,0.01494,0.01492,0.0149,0.01487,0.01485,0.01482,0.0148,0.01478,0.01475,0.01473,0.0147,0.01468,0.01466,0.01463,0.01461,0.01459,0.01456,0.01454,0.01451,0.01449,
-0.01447,0.01444,0.01442,0.0144,0.01437,0.01435,0.01433,0.0143,0.01428,0.01426,0.01424,0.01421,0.01419,0.01417,0.01414,0.01412,0.0141,0.01407,0.01405,0.01403,0.01401,0.01398,0.01396,0.01394,0.01392,0.01389,0.01387,0.01385,0.01383,0.0138,0.01378,0.01376,0.01374,0.01371,0.01369,0.01367,0.01365,0.01362,0.0136,0.01358,0.01356,0.01354,0.01351,0.01349,0.01347,0.01345,0.01343,0.01341,0.01338,0.01336,0.01334,0.01332,0.0133,0.01328,0.01325,0.01323,0.01321,0.01319,0.01317,0.01315,0.01313,0.0131,0.01308,0.01306,0.01304,0.01302,0.013,0.01298,0.01296,0.01293,0.01291,0.01289,0.01287,0.01285,0.01283,0.01281,0.01279,0.01277,0.01275,0.01273,0.01271,0.01268,0.01266,0.01264,0.01262,0.0126,0.01258,0.01256,0.01254,0.01252,0.0125,0.01248,0.01246,0.01244,0.01242,0.0124,0.01238,0.01236,0.01234,0.01232,
-0.0123,0.01228,0.01226,0.01224,0.01222,0.0122,0.01218,0.01216,0.01214,0.01212,0.0121,0.01208,0.01206,0.01204,0.01202,0.012,0.01198,0.01196,0.01194,0.01192,0.0119,0.01189,0.01187,0.01185,0.01183,0.01181,0.01179,0.01177,0.01175,0.01173,0.01171,0.01169,0.01167,0.01165,0.01164,0.01162,0.0116,0.01158,0.01156,0.01154,0.01152,0.0115,0.01149,0.01147,0.01145,0.01143,0.01141,0.01139,0.01137,0.01135,0.01134,0.01132,0.0113,0.01128,0.01126,0.01124,0.01123,0.01121,0.01119,0.01117,0.01115,0.01113,0.01112,0.0111,0.01108,0.01106,0.01104,0.01103,0.01101,0.01099,0.01097,0.01095,0.01094,0.01092,0.0109,0.01088,0.01087,0.01085,0.01083,0.01081,0.01079,0.01078,0.01076,0.01074,0.01072,0.01071,0.01069,0.01067,0.01065,0.01064,0.01062,0.0106,0.01059,0.01057,0.01055,0.01053,0.01052,0.0105,0.01048,0.01046,
-0.01045,0.01043,0.01041,0.0104,0.01038,0.01036,0.01035,0.01033,0.01031,0.0103,0.01028,0.01026,0.01024,0.01023,0.01021,0.01019,0.01018,0.01016,0.01014,0.01013,0.01011,0.01009,0.01008,0.01006,0.01005,0.01003,0.01001,0.01,0.00998,0.00996,0.00995,0.00993,0.00991,0.0099,0.00988,0.00987,0.00985,0.00983,0.00982,0.0098,0.00979,0.00977,0.00975,0.00974,0.00972,0.00971,0.00969,0.00967,0.00966,0.00964,0.00963,0.00961,0.0096,0.00958,0.00956,0.00955,0.00953,0.00952,0.0095,0.00949,0.00947,0.00946,0.00944,0.00942,0.00941,0.00939,0.00938,0.00936,0.00935,0.00933,0.00932,0.0093,0.00929,0.00927,0.00926,0.00924,0.00923,0.00921,0.0092,0.00918,0.00917,0.00915,0.00914,0.00912,0.00911,0.00909,0.00908,0.00906,0.00905,0.00903,0.00902,0.009,0.00899,0.00897,0.00896,0.00894,0.00893,0.00891,0.0089,0.00888,
-0.00887,0.00886,0.00884,0.00883,0.00881,0.0088,0.00878,0.00877,0.00875,0.00874,0.00873,0.00871,0.0087,0.00868,0.00867,0.00865,0.00864,0.00863,0.00861,0.0086,0.00858,0.00857,0.00856,0.00854,0.00853,0.00851,0.0085,0.00849,0.00847,0.00846,0.00844,0.00843,0.00842,0.0084,0.00839,0.00837,0.00836,0.00835,0.00833,0.00832,0.00831,0.00829,0.00828,0.00827,0.00825,0.00824,0.00822,0.00821,0.0082,0.00818,0.00817,0.00816,0.00814,0.00813,0.00812,0.0081,0.00809,0.00808,0.00806,0.00805,0.00804,0.00802,0.00801,0.008,0.00798,0.00797,0.00796,0.00795,0.00793,0.00792,0.00791,0.00789,0.00788,0.00787,0.00785,0.00784,0.00783,0.00782,0.0078,0.00779,0.00778,0.00776,0.00775,0.00774,0.00773,0.00771,0.0077,0.00769,0.00768,0.00766,0.00765,0.00764,0.00763,0.00761,0.0076,0.00759,0.00758,0.00756,0.00755,0.00754,
-0.00753,0.00751,0.0075,0.00749,0.00748,0.00746,0.00745,0.00744,0.00743,0.00742,0.0074,0.00739,0.00738,0.00737,0.00735,0.00734,0.00733,0.00732,0.00731,0.00729,0.00728,0.00727,0.00726,0.00725,0.00723,0.00722,0.00721,0.0072,0.00719,0.00717,0.00716,0.00715,0.00714,0.00713,0.00712,0.0071,0.00709,0.00708,0.00707,0.00706,0.00705,0.00703,0.00702,0.00701,0.007,0.00699,0.00698,0.00697,0.00695,0.00694,0.00693,0.00692,0.00691,0.0069,0.00689,0.00687,0.00686,0.00685,0.00684,0.00683,0.00682,0.00681,0.00679,0.00678,0.00677,0.00676,0.00675,0.00674,0.00673,0.00672,0.00671,0.00669,0.00668,0.00667,0.00666,0.00665,0.00664,0.00663,0.00662,0.00661,0.0066,0.00659,0.00657,0.00656,0.00655,0.00654,0.00653,0.00652,0.00651,0.0065,0.00649,0.00648,0.00647,0.00646,0.00645,0.00643,0.00642,0.00641,0.0064,0.00639,
-0.00638,0.00637,0.00636,0.00635,0.00634,0.00633,0.00632,0.00631,0.0063,0.00629,0.00628,0.00627,0.00626,0.00625,0.00624,0.00623,0.00622,0.00621,0.00619,0.00618,0.00617,0.00616,0.00615,0.00614,0.00613,0.00612,0.00611,0.0061,0.00609,0.00608,0.00607,0.00606,0.00605,0.00604,0.00603,0.00602,0.00601,0.006,0.00599,0.00598,0.00597,0.00596,0.00595,0.00594,0.00593,0.00592,0.00591,0.0059,0.0059,0.00589,0.00588,0.00587,0.00586,0.00585,0.00584,0.00583,0.00582,0.00581,0.0058,0.00579,0.00578,0.00577,0.00576,0.00575,0.00574,0.00573,0.00572,0.00571,0.0057,0.00569,0.00568,0.00568,0.00567,0.00566,0.00565,0.00564,0.00563,0.00562,0.00561,0.0056,0.00559,0.00558,0.00557,0.00556,0.00555,0.00555,0.00554,0.00553,0.00552,0.00551,0.0055,0.00549,0.00548,0.00547,0.00546,0.00545,0.00544,0.00544,0.00543,0.00542,
-0.00541,0.0054,0.00539,0.00538,0.00537,0.00536,0.00536,0.00535,0.00534,0.00533,0.00532,0.00531,0.0053,0.00529,0.00528,0.00528,0.00527,0.00526,0.00525,0.00524,0.00523,0.00522,0.00522,0.00521,0.0052,0.00519,0.00518,0.00517,0.00516,0.00516,0.00515,0.00514,0.00513,0.00512,0.00511,0.0051,0.0051,0.00509,0.00508,0.00507,0.00506,0.00505,0.00505,0.00504,0.00503,0.00502,0.00501,0.005,0.005,0.00499,0.00498,0.00497,0.00496,0.00495,0.00495,0.00494,0.00493,0.00492,0.00491,0.0049,0.0049,0.00489,0.00488,0.00487,0.00486,0.00486,0.00485,0.00484,0.00483,0.00482,0.00482,0.00481,0.0048,0.00479,0.00478,0.00478,0.00477,0.00476,0.00475,0.00474,0.00474,0.00473,0.00472,0.00471,0.00471,0.0047,0.00469,0.00468,0.00467,0.00467,0.00466,0.00465,0.00464,0.00464,0.00463,0.00462,0.00461,0.0046,0.0046,0.00459};
-
-constexpr double stored_lower_incomplete_gamma_values_n5[] = {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1e-05,1e-05,1e-05,1e-05,1e-05,1e-05,2e-05,2e-05,2e-05,3e-05,3e-05,3e-05,4e-05,4e-05,5e-05,5e-05,6e-05,6e-05,7e-05,8e-05,8e-05,9e-05,0.0001,0.00011,0.00012,0.00012,0.00013,0.00014,0.00016,0.00017,0.00018,0.00019,0.0002,0.00022,0.00023,0.00024,0.00026,0.00027,0.00029,0.00031,0.00032,0.00034,0.00036,0.00038,0.0004,0.00042,0.00044,0.00046,0.00049,0.00051,0.00053,0.00056,0.00058,0.00061,0.00064,0.00066,0.00069,0.00072,0.00075,0.00078,0.00081,0.00084,0.00088,0.00091,0.00095,0.00098,0.00102,0.00105,0.00109,0.00113,0.00117,0.00121,0.00125,0.0013,0.00134,0.00138,0.00143,0.00147,0.00152,0.00157,0.00162,0.00167,0.00172,0.00177,0.00182,0.00188,
-0.00193,0.00199,0.00204,0.0021,0.00216,0.00222,0.00228,0.00234,0.00241,0.00247,0.00254,0.0026,0.00267,0.00274,0.00281,0.00288,0.00295,0.00302,0.00309,0.00317,0.00325,0.00332,0.0034,0.00348,0.00356,0.00364,0.00373,0.00381,0.0039,0.00398,0.00407,0.00416,0.00425,0.00434,0.00443,0.00453,0.00462,0.00472,0.00481,0.00491,0.00501,0.00511,0.00522,0.00532,0.00542,0.00553,0.00564,0.00575,0.00586,0.00597,0.00608,0.00619,0.00631,0.00643,0.00654,0.00666,0.00678,0.0069,0.00703,0.00715,0.00728,0.0074,0.00753,0.00766,0.00779,0.00793,0.00806,0.00819,0.00833,0.00847,0.00861,0.00875,0.00889,0.00903,0.00918,0.00932,0.00947,0.00962,0.00977,0.00992,0.01008,0.01023,0.01039,0.01055,0.0107,0.01086,0.01103,0.01119,0.01135,0.01152,0.01169,0.01186,0.01203,0.0122,0.01237,0.01255,0.01272,0.0129,0.01308,0.01326,
-0.01345,0.01363,0.01381,0.014,0.01419,0.01438,0.01457,0.01476,0.01496,0.01515,0.01535,0.01555,0.01575,0.01595,0.01616,0.01636,0.01657,0.01677,0.01698,0.0172,0.01741,0.01762,0.01784,0.01805,0.01827,0.01849,0.01872,0.01894,0.01916,0.01939,0.01962,0.01985,0.02008,0.02031,0.02055,0.02078,0.02102,0.02126,0.0215,0.02174,0.02198,0.02223,0.02248,0.02273,0.02298,0.02323,0.02348,0.02373,0.02399,0.02425,0.02451,0.02477,0.02503,0.0253,0.02556,0.02583,0.0261,0.02637,0.02664,0.02692,0.02719,0.02747,0.02775,0.02803,0.02831,0.02859,0.02888,0.02917,0.02945,0.02974,0.03004,0.03033,0.03062,0.03092,0.03122,0.03152,0.03182,0.03212,0.03243,0.03273,0.03304,0.03335,0.03366,0.03397,0.03429,0.03461,0.03492,0.03524,0.03556,0.03589,0.03621,0.03654,0.03686,0.03719,0.03752,0.03785,0.03819,0.03852,0.03886,0.0392,
-0.03954,0.03988,0.04023,0.04057,0.04092,0.04127,0.04162,0.04197,0.04232,0.04268,0.04303,0.04339,0.04375,0.04411,0.04448,0.04484,0.04521,0.04558,0.04595,0.04632,0.04669,0.04707,0.04744,0.04782,0.0482,0.04858,0.04896,0.04935,0.04973,0.05012,0.05051,0.0509,0.0513,0.05169,0.05209,0.05248,0.05288,0.05328,0.05369,0.05409,0.0545,0.0549,0.05531,0.05572,0.05613,0.05655,0.05696,0.05738,0.0578,0.05822,0.05864,0.05907,0.05949,0.05992,0.06035,0.06078,0.06121,0.06164,0.06208,0.06251,0.06295,0.06339,0.06383,0.06427,0.06472,0.06516,0.06561,0.06606,0.06651,0.06697,0.06742,0.06788,0.06833,0.06879,0.06925,0.06971,0.07018,0.07064,0.07111,0.07158,0.07205,0.07252,0.07299,0.07347,0.07395,0.07442,0.0749,0.07539,0.07587,0.07635,0.07684,0.07733,0.07782,0.07831,0.0788,0.07929,0.07979,0.08028,0.08078,0.08128,
-0.08179,0.08229,0.08279,0.0833,0.08381,0.08432,0.08483,0.08534,0.08586,0.08637,0.08689,0.08741,0.08793,0.08845,0.08897,0.0895,0.09003,0.09055,0.09108,0.09162,0.09215,0.09268,0.09322,0.09376,0.09429,0.09484,0.09538,0.09592,0.09647,0.09701,0.09756,0.09811,0.09866,0.09921,0.09977,0.10032,0.10088,0.10144,0.102,0.10256,0.10312,0.10369,0.10426,0.10482,0.10539,0.10596,0.10654,0.10711,0.10768,0.10826,0.10884,0.10942,0.11,0.11058,0.11117,0.11175,0.11234,0.11293,0.11352,0.11411,0.1147,0.1153,0.11589,0.11649,0.11709,0.11769,0.11829,0.11889,0.1195,0.1201,0.12071,0.12132,0.12193,0.12254,0.12316,0.12377,0.12439,0.125,0.12562,0.12624,0.12687,0.12749,0.12811,0.12874,0.12937,0.13,0.13063,0.13126,0.13189,0.13253,0.13316,0.1338,0.13444,0.13508,0.13572,0.13636,0.13701,0.13765,0.1383,0.13895,
-0.1396,0.14025,0.1409,0.14156,0.14221,0.14287,0.14353,0.14418,0.14485,0.14551,0.14617,0.14684,0.1475,0.14817,0.14884,0.14951,0.15018,0.15085,0.15153,0.1522,0.15288,0.15356,0.15424,0.15492,0.1556,0.15628,0.15697,0.15766,0.15834,0.15903,0.15972,0.16041,0.16111,0.1618,0.1625,0.16319,0.16389,0.16459,0.16529,0.16599,0.16669,0.1674,0.1681,0.16881,0.16952,0.17023,0.17094,0.17165,0.17237,0.17308,0.1738,0.17451,0.17523,0.17595,0.17667,0.17739,0.17812,0.17884,0.17957,0.18029,0.18102,0.18175,0.18248,0.18321,0.18395,0.18468,0.18542,0.18615,0.18689,0.18763,0.18837,0.18911,0.18986,0.1906,0.19135,0.19209,0.19284,0.19359,0.19434,0.19509,0.19584,0.1966,0.19735,0.19811,0.19886,0.19962,0.20038,0.20114,0.2019,0.20267,0.20343,0.2042,0.20496,0.20573,0.2065,0.20727,0.20804,0.20881,0.20958,0.21036,
-0.21113,0.21191,0.21269,0.21347,0.21425,0.21503,0.21581,0.21659,0.21738,0.21816,0.21895,0.21974,0.22053,0.22132,0.22211,0.2229,0.22369,0.22449,0.22528,0.22608,0.22688,0.22767,0.22847,0.22928,0.23008,0.23088,0.23168,0.23249,0.23329,0.2341,0.23491,0.23572,0.23653,0.23734,0.23815,0.23897,0.23978,0.24059,0.24141,0.24223,0.24305,0.24387,0.24469,0.24551,0.24633,0.24715,0.24798,0.2488,0.24963,0.25046,0.25129,0.25212,0.25295,0.25378,0.25461,0.25544,0.25628,0.25711,0.25795,0.25879,0.25963,0.26046,0.2613,0.26215,0.26299,0.26383,0.26467,0.26552,0.26637,0.26721,0.26806,0.26891,0.26976,0.27061,0.27146,0.27231,0.27317,0.27402,0.27488,0.27573,0.27659,0.27745,0.2783,0.27916,0.28002,0.28089,0.28175,0.28261,0.28348,0.28434,0.28521,0.28607,0.28694,0.28781,0.28868,0.28955,0.29042,0.29129,0.29217,0.29304,
-0.29391,0.29479,0.29567,0.29654,0.29742,0.2983,0.29918,0.30006,0.30094,0.30182,0.30271,0.30359,0.30448,0.30536,0.30625,0.30713,0.30802,0.30891,0.3098,0.31069,0.31158,0.31247,0.31337,0.31426,0.31515,0.31605,0.31695,0.31784,0.31874,0.31964,0.32054,0.32144,0.32234,0.32324,0.32414,0.32504,0.32595,0.32685,0.32776,0.32866,0.32957,0.33048,0.33138,0.33229,0.3332,0.33411,0.33502,0.33594,0.33685,0.33776,0.33867,0.33959,0.3405,0.34142,0.34234,0.34326,0.34417,0.34509,0.34601,0.34693,0.34785,0.34877,0.3497,0.35062,0.35154,0.35247,0.35339,0.35432,0.35525,0.35617,0.3571,0.35803,0.35896,0.35989,0.36082,0.36175,0.36268,0.36361,0.36455,0.36548,0.36641,0.36735,0.36828,0.36922,0.37016,0.37109,0.37203,0.37297,0.37391,0.37485,0.37579,0.37673,0.37767,0.37862,0.37956,0.3805,0.38145,0.38239,0.38334,0.38428,
-0.38523,0.38618,0.38712,0.38807,0.38902,0.38997,0.39092,0.39187,0.39282,0.39377,0.39473,0.39568,0.39663,0.39759,0.39854,0.3995,0.40045,0.40141,0.40236,0.40332,0.40428,0.40524,0.4062,0.40716,0.40811,0.40908,0.41004,0.411,0.41196,0.41292,0.41388,0.41485,0.41581,0.41678,0.41774,0.41871,0.41967,0.42064,0.42161,0.42257,0.42354,0.42451,0.42548,0.42645,0.42742,0.42839,0.42936,0.43033,0.4313,0.43227,0.43325,0.43422,0.43519,0.43617,0.43714,0.43812,0.43909,0.44007,0.44104,0.44202,0.443,0.44398,0.44495,0.44593,0.44691,0.44789,0.44887,0.44985,0.45083,0.45181,0.45279,0.45377,0.45476,0.45574,0.45672,0.4577,0.45869,0.45967,0.46066,0.46164,0.46263,0.46361,0.4646,0.46559,0.46657,0.46756,0.46855,0.46953,0.47052,0.47151,0.4725,0.47349,0.47448,0.47547,0.47646,0.47745,0.47844,0.47943,0.48043,0.48142,
-0.48241,0.4834,0.4844,0.48539,0.48638,0.48738,0.48837,0.48937,0.49036,0.49136,0.49235,0.49335,0.49435,0.49534,0.49634,0.49734,0.49834,0.49933,0.50033,0.50133,0.50233,0.50333,0.50433,0.50533,0.50633,0.50733,0.50833,0.50933,0.51033,0.51133,0.51233,0.51333,0.51434,0.51534,0.51634,0.51735,0.51835,0.51935,0.52036,0.52136,0.52236,0.52337,0.52437,0.52538,0.52638,0.52739,0.52839,0.5294,0.53041,0.53141,0.53242,0.53343,0.53443,0.53544,0.53645,0.53746,0.53846,0.53947,0.54048,0.54149,0.5425,0.54351,0.54452,0.54552,0.54653,0.54754,0.54855,0.54956,0.55057,0.55159,0.5526,0.55361,0.55462,0.55563,0.55664,0.55765,0.55866,0.55968,0.56069,0.5617,0.56271,0.56372,0.56474,0.56575,0.56676,0.56778,0.56879,0.5698,0.57082,0.57183,0.57284,0.57386,0.57487,0.57589,0.5769,0.57792,0.57893,0.57995,0.58096,0.58198,
-0.58299,0.58401,0.58502,0.58604,0.58705,0.58807,0.58908,0.5901,0.59112,0.59213,0.59315,0.59417,0.59518,0.5962,0.59722,0.59823,0.59925,0.60027,0.60128,0.6023,0.60332,0.60434,0.60535,0.60637,0.60739,0.60841,0.60942,0.61044,0.61146,0.61248,0.61349,0.61451,0.61553,0.61655,0.61757,0.61859,0.6196,0.62062,0.62164,0.62266,0.62368,0.6247,0.62571,0.62673,0.62775,0.62877,0.62979,0.63081,0.63183,0.63284,0.63386,0.63488,0.6359,0.63692,0.63794,0.63896,0.63998,0.641,0.64201,0.64303,0.64405,0.64507,0.64609,0.64711,0.64813,0.64915,0.65017,0.65119,0.6522,0.65322,0.65424,0.65526,0.65628,0.6573,0.65832,0.65934,0.66035,0.66137,0.66239,0.66341,0.66443,0.66545,0.66647,0.66749,0.6685,0.66952,0.67054,0.67156,0.67258,0.6736,0.67461,0.67563,0.67665,0.67767,0.67869,0.67971,0.68072,0.68174,0.68276,0.68378,
-0.68479,0.68581,0.68683,0.68785,0.68887,0.68988,0.6909,0.69192,0.69293,0.69395,0.69497,0.69599,0.697,0.69802,0.69904,0.70005,0.70107,0.70209,0.7031,0.70412,0.70513,0.70615,0.70717,0.70818,0.7092,0.71021,0.71123,0.71224,0.71326,0.71427,0.71529,0.7163,0.71732,0.71833,0.71935,0.72036,0.72138,0.72239,0.72341,0.72442,0.72543,0.72645,0.72746,0.72847,0.72949,0.7305,0.73151,0.73253,0.73354,0.73455,0.73557,0.73658,0.73759,0.7386,0.73961,0.74063,0.74164,0.74265,0.74366,0.74467,0.74568,0.74669,0.7477,0.74871,0.74972,0.75073,0.75174,0.75275,0.75376,0.75477,0.75578,0.75679,0.7578,0.75881,0.75982,0.76083,0.76183,0.76284,0.76385,0.76486,0.76587,0.76687,0.76788,0.76889,0.76989,0.7709,0.77191,0.77291,0.77392,0.77492,0.77593,0.77694,0.77794,0.77895,0.77995,0.78096,0.78196,0.78296,0.78397,0.78497,
-0.78597,0.78698,0.78798,0.78898,0.78999,0.79099,0.79199,0.79299,0.79399,0.795,0.796,0.797,0.798,0.799,0.8,0.801,0.802,0.803,0.804,0.805,0.806,0.807,0.80799,0.80899,0.80999,0.81099,0.81198,0.81298,0.81398,0.81498,0.81597,0.81697,0.81796,0.81896,0.81996,0.82095,0.82195,0.82294,0.82393,0.82493,0.82592,0.82692,0.82791,0.8289,0.82989,0.83089,0.83188,0.83287,0.83386,0.83485,0.83584,0.83684,0.83783,0.83882,0.83981,0.8408,0.84179,0.84277,0.84376,0.84475,0.84574,0.84673,0.84772,0.8487,0.84969,0.85068,0.85166,0.85265,0.85364,0.85462,0.85561,0.85659,0.85758,0.85856,0.85954,0.86053,0.86151,0.86249,0.86348,0.86446,0.86544,0.86642,0.8674,0.86839,0.86937,0.87035,0.87133,0.87231,0.87329,0.87427,0.87525,0.87622,0.8772,0.87818,0.87916,0.88014,0.88111,0.88209,0.88307,0.88404,
-0.88502,0.88599,0.88697,0.88794,0.88892,0.88989,0.89086,0.89184,0.89281,0.89378,0.89476,0.89573,0.8967,0.89767,0.89864,0.89961,0.90058,0.90155,0.90252,0.90349,0.90446,0.90543,0.9064,0.90736,0.90833,0.9093,0.91026,0.91123,0.9122,0.91316,0.91413,0.91509,0.91606,0.91702,0.91798,0.91895,0.91991,0.92087,0.92184,0.9228,0.92376,0.92472,0.92568,0.92664,0.9276,0.92856,0.92952,0.93048,0.93144,0.9324,0.93335,0.93431,0.93527,0.93622,0.93718,0.93814,0.93909,0.94005,0.941,0.94196,0.94291,0.94386,0.94482,0.94577,0.94672,0.94767,0.94862,0.94957,0.95053,0.95148,0.95243,0.95338,0.95432,0.95527,0.95622,0.95717,0.95812,0.95906,0.96001,0.96096,0.9619,0.96285,0.96379,0.96474,0.96568,0.96662,0.96757,0.96851,0.96945,0.9704,0.97134,0.97228,0.97322,0.97416,0.9751,0.97604,0.97698,0.97792,0.97886,0.97979,
-0.98073,0.98167,0.9826,0.98354,0.98448,0.98541,0.98635,0.98728,0.98822,0.98915,0.99008,0.99102,0.99195,0.99288,0.99381,0.99474,0.99567,0.9966,0.99753,0.99846,0.99939,1.00032,1.00125,1.00217,1.0031,1.00403,1.00495,1.00588,1.0068,1.00773,1.00865,1.00958,1.0105,1.01142,1.01235,1.01327,1.01419,1.01511,1.01603,1.01695,1.01787,1.01879,1.01971,1.02063,1.02155,1.02247,1.02338,1.0243,1.02522,1.02613,1.02705,1.02796,1.02888,1.02979,1.0307,1.03162,1.03253,1.03344,1.03435,1.03526,1.03618,1.03709,1.038,1.0389,1.03981,1.04072,1.04163,1.04254,1.04344,1.04435,1.04526,1.04616,1.04707,1.04797,1.04888,1.04978,1.05068,1.05158,1.05249,1.05339,1.05429,1.05519,1.05609,1.05699,1.05789,1.05879,1.05969,1.06059,1.06148,1.06238,1.06328,1.06417,1.06507,1.06596,1.06686,1.06775,1.06864,1.06954,1.07043,1.07132,
-1.07221,1.0731,1.074,1.07489,1.07578,1.07666,1.07755,1.07844,1.07933,1.08022,1.0811,1.08199,1.08287,1.08376,1.08464,1.08553,1.08641,1.0873,1.08818,1.08906,1.08994,1.09082,1.0917,1.09258,1.09346,1.09434,1.09522,1.0961,1.09698,1.09785,1.09873,1.09961,1.10048,1.10136,1.10223,1.10311,1.10398,1.10485,1.10573,1.1066,1.10747,1.10834,1.10921,1.11008,1.11095,1.11182,1.11269,1.11356,1.11442,1.11529,1.11616,1.11702,1.11789,1.11875,1.11962,1.12048,1.12135,1.12221,1.12307,1.12393,1.1248,1.12566,1.12652,1.12738,1.12824,1.12909,1.12995,1.13081,1.13167,1.13252,1.13338,1.13424,1.13509,1.13595,1.1368,1.13765,1.13851,1.13936,1.14021,1.14106,1.14191,1.14276,1.14361,1.14446,1.14531,1.14616,1.14701,1.14786,1.1487,1.14955,1.15039,1.15124,1.15208,1.15293,1.15377,1.15461,1.15546,1.1563,1.15714,1.15798,
-1.15882,1.15966,1.1605,1.16134,1.16218,1.16302,1.16385,1.16469,1.16553,1.16636,1.1672,1.16803,1.16887,1.1697,1.17053,1.17136,1.1722,1.17303,1.17386,1.17469,1.17552,1.17635,1.17718,1.178,1.17883,1.17966,1.18049,1.18131,1.18214,1.18296,1.18379,1.18461,1.18543,1.18626,1.18708,1.1879,1.18872,1.18954,1.19036,1.19118,1.192,1.19282,1.19364,1.19445,1.19527,1.19609,1.1969,1.19772,1.19853,1.19935,1.20016,1.20097,1.20178,1.2026,1.20341,1.20422,1.20503,1.20584,1.20665,1.20746,1.20826,1.20907,1.20988,1.21068,1.21149,1.2123,1.2131,1.2139,1.21471,1.21551,1.21631,1.21712,1.21792,1.21872,1.21952,1.22032,1.22112,1.22192,1.22271,1.22351,1.22431,1.22511,1.2259,1.2267,1.22749,1.22829,1.22908,1.22987,1.23067,1.23146,1.23225,1.23304,1.23383,1.23462,1.23541,1.2362,1.23699,1.23777,1.23856,1.23935,
-1.24013,1.24092,1.2417,1.24249,1.24327,1.24406,1.24484,1.24562,1.2464,1.24718,1.24796,1.24874,1.24952,1.2503,1.25108,1.25186,1.25263,1.25341,1.25419,1.25496,1.25574,1.25651,1.25729,1.25806,1.25883,1.2596,1.26037,1.26115,1.26192,1.26269,1.26346,1.26422,1.26499,1.26576,1.26653,1.26729,1.26806,1.26882,1.26959,1.27035,1.27112,1.27188,1.27264,1.27341,1.27417,1.27493,1.27569,1.27645,1.27721,1.27797,1.27872,1.27948,1.28024,1.28099,1.28175,1.28251,1.28326,1.28401,1.28477,1.28552,1.28627,1.28703,1.28778,1.28853,1.28928,1.29003,1.29078,1.29153,1.29227,1.29302,1.29377,1.29451,1.29526,1.29601,1.29675,1.29749,1.29824,1.29898,1.29972,1.30046,1.30121,1.30195,1.30269,1.30343,1.30417,1.3049,1.30564,1.30638,1.30712,1.30785,1.30859,1.30932,1.31006,1.31079,1.31152,1.31226,1.31299,1.31372,1.31445,1.31518,
-1.31591,1.31664,1.31737,1.3181,1.31883,1.31955,1.32028,1.32101,1.32173,1.32246,1.32318,1.32391,1.32463,1.32535,1.32607,1.3268,1.32752,1.32824,1.32896,1.32968,1.3304,1.33111,1.33183,1.33255,1.33326,1.33398,1.3347,1.33541,1.33612,1.33684,1.33755,1.33826,1.33898,1.33969,1.3404,1.34111,1.34182,1.34253,1.34324,1.34394,1.34465,1.34536,1.34607,1.34677,1.34748,1.34818,1.34889,1.34959,1.35029,1.35099,1.3517,1.3524,1.3531,1.3538,1.3545,1.3552,1.3559,1.35659,1.35729,1.35799,1.35868,1.35938,1.36007,1.36077,1.36146,1.36216,1.36285,1.36354,1.36423,1.36493,1.36562,1.36631,1.367,1.36768,1.36837,1.36906,1.36975,1.37043,1.37112,1.37181,1.37249,1.37318,1.37386,1.37454,1.37523,1.37591,1.37659,1.37727,1.37795,1.37863,1.37931,1.37999,1.38067,1.38135,1.38202,1.3827,1.38338,1.38405,1.38473,1.3854,
-1.38607,1.38675,1.38742,1.38809,1.38876,1.38944,1.39011,1.39078,1.39145,1.39211,1.39278,1.39345,1.39412,1.39478,1.39545,1.39612,1.39678,1.39744,1.39811,1.39877,1.39943,1.4001,1.40076,1.40142,1.40208,1.40274,1.4034,1.40406,1.40472,1.40537,1.40603,1.40669,1.40734,1.408,1.40865,1.40931,1.40996,1.41061,1.41127,1.41192,1.41257,1.41322,1.41387,1.41452,1.41517,1.41582,1.41647,1.41712,1.41776,1.41841,1.41906,1.4197,1.42035,1.42099,1.42164,1.42228,1.42292,1.42356,1.4242,1.42485,1.42549,1.42613,1.42677,1.4274,1.42804,1.42868,1.42932,1.42995,1.43059,1.43123,1.43186,1.4325,1.43313,1.43376,1.4344,1.43503,1.43566,1.43629,1.43692,1.43755,1.43818,1.43881,1.43944,1.44007,1.44069,1.44132,1.44195,1.44257,1.4432,1.44382,1.44445,1.44507,1.44569,1.44631,1.44694,1.44756,1.44818,1.4488,1.44942,1.45004,
-1.45066,1.45127,1.45189,1.45251,1.45312,1.45374,1.45436,1.45497,1.45558,1.4562,1.45681,1.45742,1.45804,1.45865,1.45926,1.45987,1.46048,1.46109,1.4617,1.4623,1.46291,1.46352,1.46413,1.46473,1.46534,1.46594,1.46655,1.46715,1.46775,1.46836,1.46896,1.46956,1.47016,1.47076,1.47136,1.47196,1.47256,1.47316,1.47376,1.47436,1.47495,1.47555,1.47614,1.47674,1.47733,1.47793,1.47852,1.47912,1.47971,1.4803,1.48089,1.48148,1.48207,1.48266,1.48325,1.48384,1.48443,1.48502,1.4856,1.48619,1.48678,1.48736,1.48795,1.48853,1.48912,1.4897,1.49028,1.49087,1.49145,1.49203,1.49261,1.49319,1.49377,1.49435,1.49493,1.49551,1.49608,1.49666,1.49724,1.49781,1.49839,1.49896,1.49954,1.50011,1.50069,1.50126,1.50183,1.5024,1.50297,1.50354,1.50411,1.50468,1.50525,1.50582,1.50639,1.50696,1.50753,1.50809,1.50866,1.50922,
-1.50979,1.51035,1.51092,1.51148,1.51204,1.5126,1.51317,1.51373,1.51429,1.51485,1.51541,1.51597,1.51653,1.51708,1.51764,1.5182,1.51876,1.51931,1.51987,1.52042,1.52098,1.52153,1.52208,1.52264,1.52319,1.52374,1.52429,1.52484,1.52539,1.52594,1.52649,1.52704,1.52759,1.52814,1.52869,1.52923,1.52978,1.53032,1.53087,1.53141,1.53196,1.5325,1.53305,1.53359,1.53413,1.53467,1.53521,1.53575,1.53629,1.53683,1.53737,1.53791,1.53845,1.53899,1.53952,1.54006,1.54059,1.54113,1.54167,1.5422,1.54273,1.54327,1.5438,1.54433,1.54486,1.5454,1.54593,1.54646,1.54699,1.54752,1.54804,1.54857,1.5491,1.54963,1.55015,1.55068,1.55121,1.55173,1.55226,1.55278,1.5533,1.55383,1.55435,1.55487,1.55539,1.55592,1.55644,1.55696,1.55748,1.558,1.55851,1.55903,1.55955,1.56007,1.56058,1.5611,1.56162,1.56213,1.56265,1.56316,
-1.56367,1.56419,1.5647,1.56521,1.56572,1.56623,1.56674,1.56726,1.56776,1.56827,1.56878,1.56929,1.5698,1.57031,1.57081,1.57132,1.57182,1.57233,1.57283,1.57334,1.57384,1.57435,1.57485,1.57535,1.57585,1.57635,1.57685,1.57735,1.57785,1.57835,1.57885,1.57935,1.57985,1.58035,1.58084,1.58134,1.58184,1.58233,1.58283,1.58332,1.58381,1.58431,1.5848,1.58529,1.58578,1.58628,1.58677,1.58726,1.58775,1.58824,1.58873,1.58921,1.5897,1.59019,1.59068,1.59116,1.59165,1.59214,1.59262,1.59311,1.59359,1.59407,1.59456,1.59504,1.59552,1.596,1.59648,1.59697,1.59745,1.59793,1.59841,1.59888,1.59936,1.59984,1.60032,1.60079,1.60127,1.60175,1.60222,1.6027,1.60317,1.60365,1.60412,1.60459,1.60507,1.60554,1.60601,1.60648,1.60695,1.60742,1.60789,1.60836,1.60883,1.6093,1.60977,1.61023,1.6107,1.61117,1.61163,1.6121,
-1.61256,1.61303,1.61349,1.61396,1.61442,1.61488,1.61535,1.61581,1.61627,1.61673,1.61719,1.61765,1.61811,1.61857,1.61903,1.61949,1.61994,1.6204,1.62086,1.62131,1.62177,1.62222,1.62268,1.62313,1.62359,1.62404,1.62449,1.62495,1.6254,1.62585,1.6263,1.62675,1.6272,1.62765,1.6281,1.62855,1.629,1.62945,1.62989,1.63034,1.63079,1.63123,1.63168,1.63212,1.63257,1.63301,1.63346,1.6339,1.63434,1.63478,1.63523,1.63567,1.63611,1.63655,1.63699,1.63743,1.63787,1.63831,1.63875,1.63918,1.63962,1.64006,1.64049,1.64093,1.64137,1.6418,1.64224,1.64267,1.6431,1.64354,1.64397,1.6444,1.64484,1.64527,1.6457,1.64613,1.64656,1.64699,1.64742,1.64785,1.64828,1.6487,1.64913,1.64956,1.64998,1.65041,1.65084,1.65126,1.65169,1.65211,1.65253,1.65296,1.65338,1.6538,1.65423,1.65465,1.65507,1.65549,1.65591,1.65633,
-1.65675,1.65717,1.65759,1.65801,1.65842,1.65884,1.65926,1.65967,1.66009,1.66051,1.66092,1.66134,1.66175,1.66216,1.66258,1.66299,1.6634,1.66382,1.66423,1.66464,1.66505,1.66546,1.66587,1.66628,1.66669,1.6671,1.66751,1.66791,1.66832,1.66873,1.66913,1.66954,1.66995,1.67035,1.67076,1.67116,1.67156,1.67197,1.67237,1.67277,1.67318,1.67358,1.67398,1.67438,1.67478,1.67518,1.67558,1.67598,1.67638,1.67678,1.67717,1.67757,1.67797,1.67837,1.67876,1.67916,1.67955,1.67995,1.68034,1.68074,1.68113,1.68152,1.68192,1.68231,1.6827,1.68309,1.68348,1.68387,1.68426,1.68465,1.68504,1.68543,1.68582,1.68621,1.6866,1.68699,1.68737,1.68776,1.68815,1.68853,1.68892,1.6893,1.68969,1.69007,1.69045,1.69084,1.69122,1.6916,1.69198,1.69237,1.69275,1.69313,1.69351,1.69389,1.69427,1.69465,1.69503,1.69541,1.69578,1.69616,
-1.69654,1.69691,1.69729,1.69767,1.69804,1.69842,1.69879,1.69917,1.69954,1.69991,1.70029,1.70066,1.70103,1.7014,1.70178,1.70215,1.70252,1.70289,1.70326,1.70363,1.704,1.70436,1.70473,1.7051,1.70547,1.70583,1.7062,1.70657,1.70693,1.7073,1.70766,1.70803,1.70839,1.70876,1.70912,1.70948,1.70985,1.71021,1.71057,1.71093,1.71129,1.71165,1.71201,1.71237,1.71273,1.71309,1.71345,1.71381,1.71417,1.71452,1.71488,1.71524,1.71559,1.71595,1.71631,1.71666,1.71702,1.71737,1.71772,1.71808,1.71843,1.71878,1.71914,1.71949,1.71984,1.72019,1.72054,1.72089,1.72124,1.72159,1.72194,1.72229,1.72264,1.72299,1.72334,1.72368,1.72403,1.72438,1.72472,1.72507,1.72542,1.72576,1.72611,1.72645,1.72679,1.72714,1.72748,1.72782,1.72817,1.72851,1.72885,1.72919,1.72953,1.72987,1.73021,1.73055,1.73089,1.73123,1.73157,1.73191,
-1.73225,1.73258,1.73292,1.73326,1.73359,1.73393,1.73427,1.7346,1.73494,1.73527,1.7356,1.73594,1.73627,1.73661,1.73694,1.73727,1.7376,1.73793,1.73826,1.7386,1.73893,1.73926,1.73959,1.73992,1.74024,1.74057,1.7409,1.74123,1.74156,1.74188,1.74221,1.74254,1.74286,1.74319,1.74351,1.74384,1.74416,1.74449,1.74481,1.74514,1.74546,1.74578,1.7461,1.74643,1.74675,1.74707,1.74739,1.74771,1.74803,1.74835,1.74867,1.74899,1.74931,1.74963,1.74994,1.75026,1.75058,1.7509,1.75121,1.75153,1.75185,1.75216,1.75248,1.75279,1.75311,1.75342,1.75373,1.75405,1.75436,1.75467,1.75499,1.7553,1.75561,1.75592,1.75623,1.75654,1.75685,1.75716,1.75747,1.75778,1.75809,1.7584,1.75871,1.75901,1.75932,1.75963,1.75994,1.76024,1.76055,1.76085,1.76116,1.76146,1.76177,1.76207,1.76238,1.76268,1.76298,1.76329,1.76359,1.76389,
-1.76419,1.7645,1.7648,1.7651,1.7654,1.7657,1.766,1.7663,1.7666,1.7669,1.76719,1.76749,1.76779,1.76809,1.76838,1.76868,1.76898,1.76927,1.76957,1.76987,1.77016,1.77046,1.77075,1.77104,1.77134,1.77163,1.77192,1.77222,1.77251,1.7728,1.77309,1.77338,1.77368,1.77397,1.77426,1.77455,1.77484,1.77513,1.77541,1.7757,1.77599,1.77628,1.77657,1.77685,1.77714,1.77743,1.77771,1.778,1.77829,1.77857,1.77886,1.77914,1.77943,1.77971,1.77999,1.78028,1.78056,1.78084,1.78113,1.78141,1.78169,1.78197,1.78225,1.78253,1.78281,1.78309,1.78337,1.78365,1.78393,1.78421,1.78449,1.78477,1.78505,1.78532,1.7856,1.78588,1.78615,1.78643,1.78671,1.78698,1.78726,1.78753,1.78781,1.78808,1.78836,1.78863,1.7889,1.78918,1.78945,1.78972,1.78999,1.79027,1.79054,1.79081,1.79108,1.79135,1.79162,1.79189,1.79216,1.79243,
-1.7927,1.79297,1.79323,1.7935,1.79377,1.79404,1.7943,1.79457,1.79484,1.7951,1.79537,1.79564,1.7959,1.79617,1.79643,1.79669,1.79696,1.79722,1.79749,1.79775,1.79801,1.79827,1.79854,1.7988,1.79906,1.79932,1.79958,1.79984,1.8001,1.80036,1.80062,1.80088,1.80114,1.8014,1.80166,1.80192,1.80217,1.80243,1.80269,1.80295,1.8032,1.80346,1.80371,1.80397,1.80423,1.80448,1.80474,1.80499,1.80524,1.8055,1.80575,1.80601,1.80626,1.80651,1.80676,1.80702,1.80727,1.80752,1.80777,1.80802,1.80827,1.80852,1.80877,1.80902,1.80927,1.80952,1.80977,1.81002,1.81027,1.81052,1.81076,1.81101,1.81126,1.8115,1.81175,1.812,1.81224,1.81249,1.81273,1.81298,1.81322,1.81347,1.81371,1.81396,1.8142,1.81444,1.81469,1.81493,1.81517,1.81541,1.81566,1.8159,1.81614,1.81638,1.81662,1.81686,1.8171,1.81734,1.81758,1.81782,
-1.81806,1.8183,1.81854,1.81877,1.81901,1.81925,1.81949,1.81972,1.81996,1.8202,1.82043,1.82067,1.82091,1.82114,1.82138,1.82161,1.82185,1.82208,1.82231,1.82255,1.82278,1.82301,1.82325,1.82348,1.82371,1.82394,1.82418,1.82441,1.82464,1.82487,1.8251,1.82533,1.82556,1.82579,1.82602,1.82625,1.82648,1.82671,1.82693,1.82716,1.82739,1.82762,1.82785,1.82807,1.8283,1.82853,1.82875,1.82898,1.8292,1.82943,1.82965,1.82988,1.8301,1.83033,1.83055,1.83078,1.831,1.83122,1.83145,1.83167,1.83189,1.83211,1.83233,1.83256,1.83278,1.833,1.83322,1.83344,1.83366,1.83388,1.8341,1.83432,1.83454,1.83476,1.83498,1.83519,1.83541,1.83563,1.83585,1.83607,1.83628,1.8365,1.83672,1.83693,1.83715,1.83736,1.83758,1.8378,1.83801,1.83823,1.83844,1.83865,1.83887,1.83908,1.8393,1.83951,1.83972,1.83993,1.84015,1.84036,
-1.84057,1.84078,1.84099,1.8412,1.84142,1.84163,1.84184,1.84205,1.84226,1.84247,1.84268,1.84288,1.84309,1.8433,1.84351,1.84372,1.84393,1.84413,1.84434,1.84455,1.84476,1.84496,1.84517,1.84537,1.84558,1.84579,1.84599,1.8462,1.8464,1.84661,1.84681,1.84701,1.84722,1.84742,1.84762,1.84783,1.84803,1.84823,1.84844,1.84864,1.84884,1.84904,1.84924,1.84944,1.84964,1.84984,1.85005,1.85025,1.85045,1.85064,1.85084,1.85104,1.85124,1.85144,1.85164,1.85184,1.85204,1.85223,1.85243,1.85263,1.85282,1.85302,1.85322,1.85341,1.85361,1.85381,1.854,1.8542,1.85439,1.85459,1.85478,1.85497,1.85517,1.85536,1.85556,1.85575,1.85594,1.85614,1.85633,1.85652,1.85671,1.8569,1.8571,1.85729,1.85748,1.85767,1.85786,1.85805,1.85824,1.85843,1.85862,1.85881,1.859,1.85919,1.85938,1.85957,1.85975,1.85994,1.86013,1.86032,
-1.86051,1.86069,1.86088,1.86107,1.86125,1.86144,1.86163,1.86181,1.862,1.86218,1.86237,1.86255,1.86274,1.86292,1.86311,1.86329,1.86347,1.86366,1.86384,1.86402,1.86421,1.86439,1.86457,1.86475,1.86494,1.86512,1.8653,1.86548,1.86566,1.86584,1.86602,1.8662,1.86638,1.86656,1.86674,1.86692,1.8671,1.86728,1.86746,1.86764,1.86782,1.868,1.86817,1.86835,1.86853,1.86871,1.86888,1.86906,1.86924,1.86941,1.86959,1.86976,1.86994,1.87012,1.87029,1.87047,1.87064,1.87082,1.87099,1.87116,1.87134,1.87151,1.87169,1.87186,1.87203,1.87221,1.87238,1.87255,1.87272,1.87289,1.87307,1.87324,1.87341,1.87358,1.87375,1.87392,1.87409,1.87426,1.87443,1.8746,1.87477,1.87494,1.87511,1.87528,1.87545,1.87562,1.87579,1.87595,1.87612,1.87629,1.87646,1.87663,1.87679,1.87696,1.87713,1.87729,1.87746,1.87763,1.87779,1.87796,
-1.87812,1.87829,1.87845,1.87862,1.87878,1.87895,1.87911,1.87928,1.87944,1.8796,1.87977,1.87993,1.88009,1.88025,1.88042,1.88058,1.88074,1.8809,1.88107,1.88123,1.88139,1.88155,1.88171,1.88187,1.88203,1.88219,1.88235,1.88251,1.88267,1.88283,1.88299,1.88315,1.88331,1.88347,1.88363,1.88378,1.88394,1.8841,1.88426,1.88442,1.88457,1.88473,1.88489,1.88504,1.8852,1.88536,1.88551,1.88567,1.88583,1.88598,1.88614,1.88629,1.88645,1.8866,1.88676,1.88691,1.88706,1.88722,1.88737,1.88753,1.88768,1.88783,1.88799,1.88814,1.88829,1.88844,1.8886,1.88875,1.8889,1.88905,1.8892,1.88935,1.8895,1.88966,1.88981,1.88996,1.89011,1.89026,1.89041,1.89056,1.89071,1.89086,1.89101,1.89115,1.8913,1.89145,1.8916,1.89175,1.8919,1.89204,1.89219,1.89234,1.89249,1.89263,1.89278,1.89293,1.89307,1.89322,1.89337,1.89351,
-1.89366,1.8938,1.89395,1.89409,1.89424,1.89438,1.89453,1.89467,1.89482,1.89496,1.89511,1.89525,1.89539,1.89554,1.89568,1.89582,1.89597,1.89611,1.89625,1.89639,1.89654,1.89668,1.89682,1.89696,1.8971,1.89724,1.89738,1.89752,1.89767,1.89781,1.89795,1.89809,1.89823,1.89837,1.89851,1.89865,1.89878,1.89892,1.89906,1.8992,1.89934,1.89948,1.89962,1.89975,1.89989,1.90003,1.90017,1.9003,1.90044,1.90058,1.90071,1.90085,1.90099,1.90112,1.90126,1.9014,1.90153,1.90167,1.9018,1.90194,1.90207,1.90221,1.90234,1.90248,1.90261,1.90275,1.90288,1.90301,1.90315,1.90328,1.90341,1.90355,1.90368,1.90381,1.90394,1.90408,1.90421,1.90434,1.90447,1.9046,1.90474,1.90487,1.905,1.90513,1.90526,1.90539,1.90552,1.90565,1.90578,1.90591,1.90604,1.90617,1.9063,1.90643,1.90656,1.90669,1.90682,1.90695,1.90708,1.9072,
-1.90733,1.90746,1.90759,1.90772,1.90784,1.90797,1.9081,1.90823,1.90835,1.90848,1.90861,1.90873,1.90886,1.90898,1.90911,1.90924,1.90936,1.90949,1.90961,1.90974,1.90986,1.90999,1.91011,1.91024,1.91036,1.91048,1.91061,1.91073,1.91086,1.91098,1.9111,1.91123,1.91135,1.91147,1.91159,1.91172,1.91184,1.91196,1.91208,1.91221,1.91233,1.91245,1.91257,1.91269,1.91281,1.91293,1.91305,1.91317,1.9133,1.91342,1.91354,1.91366,1.91378,1.9139,1.91401,1.91413,1.91425,1.91437,1.91449,1.91461,1.91473,1.91485,1.91497,1.91508,1.9152,1.91532,1.91544,1.91555,1.91567,1.91579,1.91591,1.91602,1.91614,1.91626,1.91637,1.91649,1.91661,1.91672,1.91684,1.91695,1.91707,1.91718,1.9173,1.91741,1.91753,1.91764,1.91776,1.91787,1.91799,1.9181,1.91822,1.91833,1.91844,1.91856,1.91867,1.91878,1.9189,1.91901,1.91912,1.91923,
-1.91935,1.91946,1.91957,1.91968,1.9198,1.91991,1.92002,1.92013,1.92024,1.92035,1.92046,1.92058,1.92069,1.9208,1.92091,1.92102,1.92113,1.92124,1.92135,1.92146,1.92157,1.92168,1.92179,1.9219,1.922,1.92211,1.92222,1.92233,1.92244,1.92255,1.92266,1.92276,1.92287,1.92298,1.92309,1.9232,1.9233,1.92341,1.92352,1.92362,1.92373,1.92384,1.92394,1.92405,1.92416,1.92426,1.92437,1.92447,1.92458,1.92469,1.92479,1.9249,1.925,1.92511,1.92521,1.92532,1.92542,1.92552,1.92563,1.92573,1.92584,1.92594,1.92604,1.92615,1.92625,1.92636,1.92646,1.92656,1.92666,1.92677,1.92687,1.92697,1.92707,1.92718,1.92728,1.92738,1.92748,1.92758,1.92769,1.92779,1.92789,1.92799,1.92809,1.92819,1.92829,1.92839,1.92849,1.92859,1.92869,1.92879,1.92889,1.92899,1.92909,1.92919,1.92929,1.92939,1.92949,1.92959,1.92969,1.92979,
-1.92989,1.92999,1.93008,1.93018,1.93028,1.93038,1.93048,1.93057,1.93067,1.93077,1.93087,1.93096,1.93106,1.93116,1.93125,1.93135,1.93145,1.93154,1.93164,1.93174,1.93183,1.93193,1.93202,1.93212,1.93222,1.93231,1.93241,1.9325,1.9326,1.93269,1.93279,1.93288,1.93298,1.93307,1.93316,1.93326,1.93335,1.93345,1.93354,1.93363,1.93373,1.93382,1.93391,1.93401,1.9341,1.93419,1.93429,1.93438,1.93447,1.93456,1.93466,1.93475,1.93484,1.93493,1.93502,1.93512,1.93521,1.9353,1.93539,1.93548,1.93557,1.93566,1.93575,1.93584,1.93593,1.93603,1.93612,1.93621,1.9363,1.93639,1.93648,1.93657,1.93666,1.93674,1.93683,1.93692,1.93701,1.9371,1.93719,1.93728,1.93737,1.93746,1.93754,1.93763,1.93772,1.93781,1.9379,1.93799,1.93807,1.93816,1.93825,1.93834,1.93842,1.93851,1.9386,1.93868,1.93877,1.93886,1.93894,1.93903,
-1.93912,1.9392,1.93929,1.93937,1.93946,1.93955,1.93963,1.93972,1.9398,1.93989,1.93997,1.94006,1.94014,1.94023,1.94031,1.9404,1.94048,1.94057,1.94065,1.94074,1.94082,1.9409,1.94099,1.94107,1.94115,1.94124,1.94132,1.9414,1.94149,1.94157,1.94165,1.94174,1.94182,1.9419,1.94198,1.94207,1.94215,1.94223,1.94231,1.94239,1.94248,1.94256,1.94264,1.94272,1.9428,1.94288,1.94296,1.94305,1.94313,1.94321,1.94329,1.94337,1.94345,1.94353,1.94361,1.94369,1.94377,1.94385,1.94393,1.94401,1.94409,1.94417,1.94425,1.94433,1.94441,1.94449,1.94456,1.94464,1.94472,1.9448,1.94488,1.94496,1.94504,1.94511,1.94519,1.94527,1.94535,1.94543,1.9455,1.94558,1.94566,1.94574,1.94581,1.94589,1.94597,1.94605,1.94612,1.9462,1.94628,1.94635,1.94643,1.94651,1.94658,1.94666,1.94673,1.94681,1.94689,1.94696,1.94704,1.94711,
-1.94719,1.94726,1.94734,1.94741,1.94749,1.94756,1.94764,1.94771,1.94779,1.94786,1.94794,1.94801,1.94809,1.94816,1.94823,1.94831,1.94838,1.94845,1.94853,1.9486,1.94868,1.94875,1.94882,1.94889,1.94897,1.94904,1.94911,1.94919,1.94926,1.94933,1.9494,1.94948,1.94955,1.94962,1.94969,1.94976,1.94984,1.94991,1.94998,1.95005,1.95012,1.95019,1.95027,1.95034,1.95041,1.95048,1.95055,1.95062,1.95069,1.95076,1.95083,1.9509,1.95097,1.95104,1.95111,1.95118,1.95125,1.95132,1.95139,1.95146,1.95153,1.9516,1.95167,1.95174,1.95181,1.95188,1.95195,1.95202,1.95208,1.95215,1.95222,1.95229,1.95236,1.95243,1.95249,1.95256,1.95263,1.9527,1.95277,1.95283,1.9529,1.95297,1.95304,1.9531,1.95317,1.95324,1.95331,1.95337,1.95344,1.95351,1.95357,1.95364,1.95371,1.95377,1.95384,1.95391,1.95397,1.95404,1.9541,1.95417,
-1.95424,1.9543,1.95437,1.95443,1.9545,1.95456,1.95463,1.95469,1.95476,1.95482,1.95489,1.95495,1.95502,1.95508,1.95515,1.95521,1.95528,1.95534,1.95541,1.95547,1.95553,1.9556,1.95566,1.95573,1.95579,1.95585,1.95592,1.95598,1.95604,1.95611,1.95617,1.95623,1.9563,1.95636,1.95642,1.95648,1.95655,1.95661,1.95667,1.95673,1.9568,1.95686,1.95692,1.95698,1.95704,1.95711,1.95717,1.95723,1.95729,1.95735,1.95741,1.95748,1.95754,1.9576,1.95766,1.95772,1.95778,1.95784,1.9579,1.95796,1.95802,1.95808,1.95815,1.95821,1.95827,1.95833,1.95839,1.95845,1.95851,1.95857,1.95863,1.95869,1.95875,1.9588,1.95886,1.95892,1.95898,1.95904,1.9591,1.95916,1.95922,1.95928,1.95934,1.9594,1.95945,1.95951,1.95957,1.95963,1.95969,1.95975,1.9598,1.95986,1.95992,1.95998,1.96004,1.96009,1.96015,1.96021,1.96027,1.96032};
-
-constexpr double stored_gamma_values_n5[] = {1.0,1.0,1.0,1.0,1.0,0.99999,0.99999,0.99999,0.99999,0.99998,0.99998,0.99997,0.99997,0.99996,0.99996,0.99995,0.99995,0.99994,0.99993,0.99993,0.99992,0.99991,0.9999,0.99989,0.99988,0.99987,0.99986,0.99985,0.99984,0.99983,0.99981,0.9998,0.99979,0.99978,0.99976,0.99975,0.99973,0.99972,0.9997,0.99969,0.99967,0.99965,0.99964,0.99962,0.9996,0.99958,0.99957,0.99955,0.99953,0.99951,0.99949,0.99947,0.99945,0.99943,0.9994,0.99938,0.99936,0.99934,0.99931,0.99929,0.99927,0.99924,0.99922,0.99919,0.99917,0.99914,0.99911,0.99909,0.99906,0.99903,0.99901,0.99898,0.99895,0.99892,0.99889,0.99886,0.99883,0.9988,0.99877,0.99874,0.99871,0.99867,0.99864,0.99861,0.99858,0.99854,0.99851,0.99847,0.99844,0.9984,0.99837,0.99833,0.9983,0.99826,0.99822,0.99819,0.99815,0.99811,0.99807,0.99803,
-0.998,0.99796,0.99792,0.99788,0.99784,0.9978,0.99775,0.99771,0.99767,0.99763,0.99759,0.99754,0.9975,0.99746,0.99741,0.99737,0.99732,0.99728,0.99723,0.99718,0.99714,0.99709,0.99704,0.997,0.99695,0.9969,0.99685,0.9968,0.99676,0.99671,0.99666,0.99661,0.99656,0.9965,0.99645,0.9964,0.99635,0.9963,0.99624,0.99619,0.99614,0.99608,0.99603,0.99598,0.99592,0.99587,0.99581,0.99576,0.9957,0.99564,0.99559,0.99553,0.99547,0.99541,0.99536,0.9953,0.99524,0.99518,0.99512,0.99506,0.995,0.99494,0.99488,0.99482,0.99476,0.99469,0.99463,0.99457,0.99451,0.99444,0.99438,0.99431,0.99425,0.99419,0.99412,0.99406,0.99399,0.99392,0.99386,0.99379,0.99372,0.99366,0.99359,0.99352,0.99345,0.99339,0.99332,0.99325,0.99318,0.99311,0.99304,0.99297,0.9929,0.99283,0.99275,0.99268,0.99261,0.99254,0.99247,0.99239,
-0.99232,0.99225,0.99217,0.9921,0.99202,0.99195,0.99187,0.9918,0.99172,0.99164,0.99157,0.99149,0.99141,0.99134,0.99126,0.99118,0.9911,0.99102,0.99094,0.99086,0.99078,0.9907,0.99062,0.99054,0.99046,0.99038,0.9903,0.99022,0.99014,0.99005,0.98997,0.98989,0.9898,0.98972,0.98964,0.98955,0.98947,0.98938,0.9893,0.98921,0.98913,0.98904,0.98895,0.98887,0.98878,0.98869,0.9886,0.98852,0.98843,0.98834,0.98825,0.98816,0.98807,0.98798,0.98789,0.9878,0.98771,0.98762,0.98753,0.98744,0.98735,0.98725,0.98716,0.98707,0.98698,0.98688,0.98679,0.9867,0.9866,0.98651,0.98641,0.98632,0.98622,0.98613,0.98603,0.98593,0.98584,0.98574,0.98564,0.98555,0.98545,0.98535,0.98525,0.98515,0.98506,0.98496,0.98486,0.98476,0.98466,0.98456,0.98446,0.98436,0.98426,0.98415,0.98405,0.98395,0.98385,0.98375,0.98364,0.98354,
-0.98344,0.98333,0.98323,0.98313,0.98302,0.98292,0.98281,0.98271,0.9826,0.9825,0.98239,0.98228,0.98218,0.98207,0.98196,0.98186,0.98175,0.98164,0.98153,0.98142,0.98131,0.98121,0.9811,0.98099,0.98088,0.98077,0.98066,0.98055,0.98043,0.98032,0.98021,0.9801,0.97999,0.97988,0.97976,0.97965,0.97954,0.97942,0.97931,0.9792,0.97908,0.97897,0.97885,0.97874,0.97862,0.97851,0.97839,0.97828,0.97816,0.97805,0.97793,0.97781,0.97769,0.97758,0.97746,0.97734,0.97722,0.9771,0.97699,0.97687,0.97675,0.97663,0.97651,0.97639,0.97627,0.97615,0.97603,0.97591,0.97579,0.97566,0.97554,0.97542,0.9753,0.97518,0.97505,0.97493,0.97481,0.97468,0.97456,0.97443,0.97431,0.97419,0.97406,0.97394,0.97381,0.97369,0.97356,0.97343,0.97331,0.97318,0.97305,0.97293,0.9728,0.97267,0.97255,0.97242,0.97229,0.97216,0.97203,0.9719,
-0.97177,0.97165,0.97152,0.97139,0.97126,0.97113,0.97099,0.97086,0.97073,0.9706,0.97047,0.97034,0.97021,0.97007,0.96994,0.96981,0.96968,0.96954,0.96941,0.96928,0.96914,0.96901,0.96887,0.96874,0.96861,0.96847,0.96833,0.9682,0.96806,0.96793,0.96779,0.96766,0.96752,0.96738,0.96724,0.96711,0.96697,0.96683,0.96669,0.96656,0.96642,0.96628,0.96614,0.966,0.96586,0.96572,0.96558,0.96544,0.9653,0.96516,0.96502,0.96488,0.96474,0.9646,0.96446,0.96432,0.96417,0.96403,0.96389,0.96375,0.9636,0.96346,0.96332,0.96317,0.96303,0.96289,0.96274,0.9626,0.96245,0.96231,0.96216,0.96202,0.96187,0.96173,0.96158,0.96143,0.96129,0.96114,0.961,0.96085,0.9607,0.96055,0.96041,0.96026,0.96011,0.95996,0.95981,0.95967,0.95952,0.95937,0.95922,0.95907,0.95892,0.95877,0.95862,0.95847,0.95832,0.95817,0.95802,0.95787,
-0.95771,0.95756,0.95741,0.95726,0.95711,0.95695,0.9568,0.95665,0.9565,0.95634,0.95619,0.95604,0.95588,0.95573,0.95557,0.95542,0.95527,0.95511,0.95496,0.9548,0.95465,0.95449,0.95433,0.95418,0.95402,0.95387,0.95371,0.95355,0.9534,0.95324,0.95308,0.95292,0.95277,0.95261,0.95245,0.95229,0.95213,0.95197,0.95182,0.95166,0.9515,0.95134,0.95118,0.95102,0.95086,0.9507,0.95054,0.95038,0.95022,0.95006,0.94989,0.94973,0.94957,0.94941,0.94925,0.94909,0.94892,0.94876,0.9486,0.94844,0.94827,0.94811,0.94795,0.94778,0.94762,0.94745,0.94729,0.94713,0.94696,0.9468,0.94663,0.94647,0.9463,0.94614,0.94597,0.9458,0.94564,0.94547,0.94531,0.94514,0.94497,0.94481,0.94464,0.94447,0.9443,0.94414,0.94397,0.9438,0.94363,0.94347,0.9433,0.94313,0.94296,0.94279,0.94262,0.94245,0.94228,0.94211,0.94194,0.94177,
-0.9416,0.94143,0.94126,0.94109,0.94092,0.94075,0.94058,0.94041,0.94023,0.94006,0.93989,0.93972,0.93955,0.93937,0.9392,0.93903,0.93886,0.93868,0.93851,0.93834,0.93816,0.93799,0.93781,0.93764,0.93747,0.93729,0.93712,0.93694,0.93677,0.93659,0.93642,0.93624,0.93607,0.93589,0.93571,0.93554,0.93536,0.93518,0.93501,0.93483,0.93465,0.93448,0.9343,0.93412,0.93394,0.93377,0.93359,0.93341,0.93323,0.93305,0.93288,0.9327,0.93252,0.93234,0.93216,0.93198,0.9318,0.93162,0.93144,0.93126,0.93108,0.9309,0.93072,0.93054,0.93036,0.93018,0.93,0.92982,0.92963,0.92945,0.92927,0.92909,0.92891,0.92873,0.92854,0.92836,0.92818,0.928,0.92781,0.92763,0.92745,0.92726,0.92708,0.9269,0.92671,0.92653,0.92634,0.92616,0.92598,0.92579,0.92561,0.92542,0.92524,0.92505,0.92487,0.92468,0.92449,0.92431,0.92412,0.92394,
-0.92375,0.92356,0.92338,0.92319,0.923,0.92282,0.92263,0.92244,0.92226,0.92207,0.92188,0.92169,0.92151,0.92132,0.92113,0.92094,0.92075,0.92056,0.92038,0.92019,0.92,0.91981,0.91962,0.91943,0.91924,0.91905,0.91886,0.91867,0.91848,0.91829,0.9181,0.91791,0.91772,0.91753,0.91734,0.91714,0.91695,0.91676,0.91657,0.91638,0.91619,0.916,0.9158,0.91561,0.91542,0.91523,0.91503,0.91484,0.91465,0.91446,0.91426,0.91407,0.91388,0.91368,0.91349,0.91329,0.9131,0.91291,0.91271,0.91252,0.91232,0.91213,0.91193,0.91174,0.91154,0.91135,0.91115,0.91096,0.91076,0.91057,0.91037,0.91018,0.90998,0.90978,0.90959,0.90939,0.90919,0.909,0.9088,0.9086,0.90841,0.90821,0.90801,0.90782,0.90762,0.90742,0.90722,0.90702,0.90683,0.90663,0.90643,0.90623,0.90603,0.90584,0.90564,0.90544,0.90524,0.90504,0.90484,0.90464,
-0.90444,0.90424,0.90404,0.90384,0.90364,0.90344,0.90324,0.90304,0.90284,0.90264,0.90244,0.90224,0.90204,0.90184,0.90164,0.90144,0.90123,0.90103,0.90083,0.90063,0.90043,0.90023,0.90002,0.89982,0.89962,0.89942,0.89921,0.89901,0.89881,0.89861,0.8984,0.8982,0.898,0.89779,0.89759,0.89739,0.89718,0.89698,0.89677,0.89657,0.89637,0.89616,0.89596,0.89575,0.89555,0.89534,0.89514,0.89494,0.89473,0.89453,0.89432,0.89411,0.89391,0.8937,0.8935,0.89329,0.89309,0.89288,0.89267,0.89247,0.89226,0.89206,0.89185,0.89164,0.89144,0.89123,0.89102,0.89082,0.89061,0.8904,0.89019,0.88999,0.88978,0.88957,0.88936,0.88916,0.88895,0.88874,0.88853,0.88832,0.88811,0.88791,0.8877,0.88749,0.88728,0.88707,0.88686,0.88665,0.88644,0.88623,0.88603,0.88582,0.88561,0.8854,0.88519,0.88498,0.88477,0.88456,0.88435,0.88414,
-0.88393,0.88372,0.88351,0.88329,0.88308,0.88287,0.88266,0.88245,0.88224,0.88203,0.88182,0.88161,0.88139,0.88118,0.88097,0.88076,0.88055,0.88034,0.88012,0.87991,0.8797,0.87949,0.87927,0.87906,0.87885,0.87864,0.87842,0.87821,0.878,0.87778,0.87757,0.87736,0.87714,0.87693,0.87672,0.8765,0.87629,0.87608,0.87586,0.87565,0.87543,0.87522,0.87501,0.87479,0.87458,0.87436,0.87415,0.87393,0.87372,0.8735,0.87329,0.87307,0.87286,0.87264,0.87243,0.87221,0.872,0.87178,0.87157,0.87135,0.87113,0.87092,0.8707,0.87049,0.87027,0.87005,0.86984,0.86962,0.8694,0.86919,0.86897,0.86875,0.86854,0.86832,0.8681,0.86789,0.86767,0.86745,0.86723,0.86702,0.8668,0.86658,0.86636,0.86615,0.86593,0.86571,0.86549,0.86527,0.86506,0.86484,0.86462,0.8644,0.86418,0.86396,0.86375,0.86353,0.86331,0.86309,0.86287,0.86265,
-0.86243,0.86221,0.86199,0.86177,0.86156,0.86134,0.86112,0.8609,0.86068,0.86046,0.86024,0.86002,0.8598,0.85958,0.85936,0.85914,0.85892,0.8587,0.85848,0.85826,0.85803,0.85781,0.85759,0.85737,0.85715,0.85693,0.85671,0.85649,0.85627,0.85605,0.85582,0.8556,0.85538,0.85516,0.85494,0.85472,0.8545,0.85427,0.85405,0.85383,0.85361,0.85339,0.85316,0.85294,0.85272,0.8525,0.85227,0.85205,0.85183,0.85161,0.85138,0.85116,0.85094,0.85071,0.85049,0.85027,0.85005,0.84982,0.8496,0.84938,0.84915,0.84893,0.8487,0.84848,0.84826,0.84803,0.84781,0.84759,0.84736,0.84714,0.84691,0.84669,0.84647,0.84624,0.84602,0.84579,0.84557,0.84534,0.84512,0.84489,0.84467,0.84445,0.84422,0.844,0.84377,0.84355,0.84332,0.8431,0.84287,0.84265,0.84242,0.84219,0.84197,0.84174,0.84152,0.84129,0.84107,0.84084,0.84061,0.84039,
-0.84016,0.83994,0.83971,0.83949,0.83926,0.83903,0.83881,0.83858,0.83835,0.83813,0.8379,0.83767,0.83745,0.83722,0.83699,0.83677,0.83654,0.83631,0.83609,0.83586,0.83563,0.83541,0.83518,0.83495,0.83472,0.8345,0.83427,0.83404,0.83381,0.83359,0.83336,0.83313,0.8329,0.83268,0.83245,0.83222,0.83199,0.83176,0.83154,0.83131,0.83108,0.83085,0.83062,0.83039,0.83017,0.82994,0.82971,0.82948,0.82925,0.82902,0.8288,0.82857,0.82834,0.82811,0.82788,0.82765,0.82742,0.82719,0.82696,0.82673,0.82651,0.82628,0.82605,0.82582,0.82559,0.82536,0.82513,0.8249,0.82467,0.82444,0.82421,0.82398,0.82375,0.82352,0.82329,0.82306,0.82283,0.8226,0.82237,0.82214,0.82191,0.82168,0.82145,0.82122,0.82099,0.82076,0.82053,0.8203,0.82007,0.81984,0.81961,0.81938,0.81915,0.81892,0.81869,0.81845,0.81822,0.81799,0.81776,0.81753,
-0.8173,0.81707,0.81684,0.81661,0.81638,0.81614,0.81591,0.81568,0.81545,0.81522,0.81499,0.81476,0.81452,0.81429,0.81406,0.81383,0.8136,0.81337,0.81313,0.8129,0.81267,0.81244,0.81221,0.81197,0.81174,0.81151,0.81128,0.81105,0.81081,0.81058,0.81035,0.81012,0.80988,0.80965,0.80942,0.80919,0.80896,0.80872,0.80849,0.80826,0.80802,0.80779,0.80756,0.80733,0.80709,0.80686,0.80663,0.8064,0.80616,0.80593,0.8057,0.80546,0.80523,0.805,0.80476,0.80453,0.8043,0.80406,0.80383,0.8036,0.80336,0.80313,0.8029,0.80266,0.80243,0.8022,0.80196,0.80173,0.8015,0.80126,0.80103,0.8008,0.80056,0.80033,0.80009,0.79986,0.79963,0.79939,0.79916,0.79892,0.79869,0.79846,0.79822,0.79799,0.79775,0.79752,0.79729,0.79705,0.79682,0.79658,0.79635,0.79611,0.79588,0.79565,0.79541,0.79518,0.79494,0.79471,0.79447,0.79424,
-0.794,0.79377,0.79353,0.7933,0.79307,0.79283,0.7926,0.79236,0.79213,0.79189,0.79166,0.79142,0.79119,0.79095,0.79072,0.79048,0.79025,0.79001,0.78978,0.78954,0.78931,0.78907,0.78884,0.7886,0.78836,0.78813,0.78789,0.78766,0.78742,0.78719,0.78695,0.78672,0.78648,0.78625,0.78601,0.78578,0.78554,0.7853,0.78507,0.78483,0.7846,0.78436,0.78413,0.78389,0.78365,0.78342,0.78318,0.78295,0.78271,0.78248,0.78224,0.782,0.78177,0.78153,0.7813,0.78106,0.78082,0.78059,0.78035,0.78012,0.77988,0.77964,0.77941,0.77917,0.77893,0.7787,0.77846,0.77823,0.77799,0.77775,0.77752,0.77728,0.77704,0.77681,0.77657,0.77634,0.7761,0.77586,0.77563,0.77539,0.77515,0.77492,0.77468,0.77444,0.77421,0.77397,0.77373,0.7735,0.77326,0.77302,0.77279,0.77255,0.77231,0.77208,0.77184,0.7716,0.77137,0.77113,0.77089,0.77066,
-0.77042,0.77018,0.76995,0.76971,0.76947,0.76924,0.769,0.76876,0.76852,0.76829,0.76805,0.76781,0.76758,0.76734,0.7671,0.76687,0.76663,0.76639,0.76615,0.76592,0.76568,0.76544,0.76521,0.76497,0.76473,0.76449,0.76426,0.76402,0.76378,0.76354,0.76331,0.76307,0.76283,0.7626,0.76236,0.76212,0.76188,0.76165,0.76141,0.76117,0.76093,0.7607,0.76046,0.76022,0.75998,0.75975,0.75951,0.75927,0.75903,0.7588,0.75856,0.75832,0.75808,0.75785,0.75761,0.75737,0.75713,0.7569,0.75666,0.75642,0.75618,0.75595,0.75571,0.75547,0.75523,0.755,0.75476,0.75452,0.75428,0.75405,0.75381,0.75357,0.75333,0.7531,0.75286,0.75262,0.75238,0.75214,0.75191,0.75167,0.75143,0.75119,0.75096,0.75072,0.75048,0.75024,0.75,0.74977,0.74953,0.74929,0.74905,0.74882,0.74858,0.74834,0.7481,0.74786,0.74763,0.74739,0.74715,0.74691,
-0.74667,0.74644,0.7462,0.74596,0.74572,0.74549,0.74525,0.74501,0.74477,0.74453,0.7443,0.74406,0.74382,0.74358,0.74334,0.74311,0.74287,0.74263,0.74239,0.74215,0.74192,0.74168,0.74144,0.7412,0.74096,0.74073,0.74049,0.74025,0.74001,0.73977,0.73954,0.7393,0.73906,0.73882,0.73859,0.73835,0.73811,0.73787,0.73763,0.7374,0.73716,0.73692,0.73668,0.73644,0.73621,0.73597,0.73573,0.73549,0.73525,0.73502,0.73478,0.73454,0.7343,0.73406,0.73383,0.73359,0.73335,0.73311,0.73287,0.73264,0.7324,0.73216,0.73192,0.73168,0.73145,0.73121,0.73097,0.73073,0.73049,0.73026,0.73002,0.72978,0.72954,0.7293,0.72907,0.72883,0.72859,0.72835,0.72811,0.72788,0.72764,0.7274,0.72716,0.72693,0.72669,0.72645,0.72621,0.72597,0.72574,0.7255,0.72526,0.72502,0.72478,0.72455,0.72431,0.72407,0.72383,0.72359,0.72336,0.72312,
-0.72288,0.72264,0.72241,0.72217,0.72193,0.72169,0.72145,0.72122,0.72098,0.72074,0.7205,0.72027,0.72003,0.71979,0.71955,0.71931,0.71908,0.71884,0.7186,0.71836,0.71813,0.71789,0.71765,0.71741,0.71718,0.71694,0.7167,0.71646,0.71622,0.71599,0.71575,0.71551,0.71527,0.71504,0.7148,0.71456,0.71432,0.71409,0.71385,0.71361,0.71337,0.71314,0.7129,0.71266,0.71242,0.71219,0.71195,0.71171,0.71147,0.71124,0.711,0.71076,0.71052,0.71029,0.71005,0.70981,0.70957,0.70934,0.7091,0.70886,0.70862,0.70839,0.70815,0.70791,0.70767,0.70744,0.7072,0.70696,0.70673,0.70649,0.70625,0.70601,0.70578,0.70554,0.7053,0.70506,0.70483,0.70459,0.70435,0.70412,0.70388,0.70364,0.7034,0.70317,0.70293,0.70269,0.70246,0.70222,0.70198,0.70175,0.70151,0.70127,0.70103,0.7008,0.70056,0.70032,0.70009,0.69985,0.69961,0.69938,
-0.69914,0.6989,0.69867,0.69843,0.69819,0.69795,0.69772,0.69748,0.69724,0.69701,0.69677,0.69653,0.6963,0.69606,0.69582,0.69559,0.69535,0.69511,0.69488,0.69464,0.6944,0.69417,0.69393,0.6937,0.69346,0.69322,0.69299,0.69275,0.69251,0.69228,0.69204,0.6918,0.69157,0.69133,0.69109,0.69086,0.69062,0.69039,0.69015,0.68991,0.68968,0.68944,0.6892,0.68897,0.68873,0.6885,0.68826,0.68802,0.68779,0.68755,0.68731,0.68708,0.68684,0.68661,0.68637,0.68613,0.6859,0.68566,0.68543,0.68519,0.68495,0.68472,0.68448,0.68425,0.68401,0.68378,0.68354,0.6833,0.68307,0.68283,0.6826,0.68236,0.68213,0.68189,0.68165,0.68142,0.68118,0.68095,0.68071,0.68048,0.68024,0.68001,0.67977,0.67953,0.6793,0.67906,0.67883,0.67859,0.67836,0.67812,0.67789,0.67765,0.67742,0.67718,0.67695,0.67671,0.67648,0.67624,0.67601,0.67577,
-0.67553,0.6753,0.67506,0.67483,0.67459,0.67436,0.67412,0.67389,0.67365,0.67342,0.67319,0.67295,0.67272,0.67248,0.67225,0.67201,0.67178,0.67154,0.67131,0.67107,0.67084,0.6706,0.67037,0.67013,0.6699,0.66966,0.66943,0.6692,0.66896,0.66873,0.66849,0.66826,0.66802,0.66779,0.66755,0.66732,0.66709,0.66685,0.66662,0.66638,0.66615,0.66592,0.66568,0.66545,0.66521,0.66498,0.66474,0.66451,0.66428,0.66404,0.66381,0.66357,0.66334,0.66311,0.66287,0.66264,0.66241,0.66217,0.66194,0.6617,0.66147,0.66124,0.661,0.66077,0.66054,0.6603,0.66007,0.65984,0.6596,0.65937,0.65914,0.6589,0.65867,0.65843,0.6582,0.65797,0.65774,0.6575,0.65727,0.65704,0.6568,0.65657,0.65634,0.6561,0.65587,0.65564,0.6554,0.65517,0.65494,0.6547,0.65447,0.65424,0.65401,0.65377,0.65354,0.65331,0.65308,0.65284,0.65261,0.65238,
-0.65214,0.65191,0.65168,0.65145,0.65121,0.65098,0.65075,0.65052,0.65028,0.65005,0.64982,0.64959,0.64936,0.64912,0.64889,0.64866,0.64843,0.64819,0.64796,0.64773,0.6475,0.64727,0.64703,0.6468,0.64657,0.64634,0.64611,0.64587,0.64564,0.64541,0.64518,0.64495,0.64472,0.64448,0.64425,0.64402,0.64379,0.64356,0.64333,0.64309,0.64286,0.64263,0.6424,0.64217,0.64194,0.64171,0.64148,0.64124,0.64101,0.64078,0.64055,0.64032,0.64009,0.63986,0.63963,0.6394,0.63916,0.63893,0.6387,0.63847,0.63824,0.63801,0.63778,0.63755,0.63732,0.63709,0.63686,0.63663,0.6364,0.63616,0.63593,0.6357,0.63547,0.63524,0.63501,0.63478,0.63455,0.63432,0.63409,0.63386,0.63363,0.6334,0.63317,0.63294,0.63271,0.63248,0.63225,0.63202,0.63179,0.63156,0.63133,0.6311,0.63087,0.63064,0.63041,0.63018,0.62995,0.62972,0.62949,0.62926,
-0.62903,0.6288,0.62857,0.62835,0.62812,0.62789,0.62766,0.62743,0.6272,0.62697,0.62674,0.62651,0.62628,0.62605,0.62582,0.62559,0.62537,0.62514,0.62491,0.62468,0.62445,0.62422,0.62399,0.62376,0.62354,0.62331,0.62308,0.62285,0.62262,0.62239,0.62216,0.62194,0.62171,0.62148,0.62125,0.62102,0.62079,0.62057,0.62034,0.62011,0.61988,0.61965,0.61942,0.6192,0.61897,0.61874,0.61851,0.61828,0.61806,0.61783,0.6176,0.61737,0.61715,0.61692,0.61669,0.61646,0.61624,0.61601,0.61578,0.61555,0.61533,0.6151,0.61487,0.61464,0.61442,0.61419,0.61396,0.61373,0.61351,0.61328,0.61305,0.61283,0.6126,0.61237,0.61215,0.61192,0.61169,0.61147,0.61124,0.61101,0.61079,0.61056,0.61033,0.61011,0.60988,0.60965,0.60943,0.6092,0.60897,0.60875,0.60852,0.60829,0.60807,0.60784,0.60762,0.60739,0.60716,0.60694,0.60671,0.60649,
-0.60626,0.60603,0.60581,0.60558,0.60536,0.60513,0.6049,0.60468,0.60445,0.60423,0.604,0.60378,0.60355,0.60333,0.6031,0.60288,0.60265,0.60243,0.6022,0.60197,0.60175,0.60152,0.6013,0.60107,0.60085,0.60062,0.6004,0.60017,0.59995,0.59973,0.5995,0.59928,0.59905,0.59883,0.5986,0.59838,0.59815,0.59793,0.5977,0.59748,0.59726,0.59703,0.59681,0.59658,0.59636,0.59613,0.59591,0.59569,0.59546,0.59524,0.59501,0.59479,0.59457,0.59434,0.59412,0.5939,0.59367,0.59345,0.59322,0.593,0.59278,0.59255,0.59233,0.59211,0.59188,0.59166,0.59144,0.59121,0.59099,0.59077,0.59054,0.59032,0.5901,0.58988,0.58965,0.58943,0.58921,0.58898,0.58876,0.58854,0.58832,0.58809,0.58787,0.58765,0.58742,0.5872,0.58698,0.58676,0.58654,0.58631,0.58609,0.58587,0.58565,0.58542,0.5852,0.58498,0.58476,0.58454,0.58431,0.58409,
-0.58387,0.58365,0.58343,0.58321,0.58298,0.58276,0.58254,0.58232,0.5821,0.58188,0.58165,0.58143,0.58121,0.58099,0.58077,0.58055,0.58033,0.58011,0.57988,0.57966,0.57944,0.57922,0.579,0.57878,0.57856,0.57834,0.57812,0.5779,0.57768,0.57746,0.57723,0.57701,0.57679,0.57657,0.57635,0.57613,0.57591,0.57569,0.57547,0.57525,0.57503,0.57481,0.57459,0.57437,0.57415,0.57393,0.57371,0.57349,0.57327,0.57305,0.57283,0.57261,0.57239,0.57217,0.57195,0.57174,0.57152,0.5713,0.57108,0.57086,0.57064,0.57042,0.5702,0.56998,0.56976,0.56954,0.56932,0.56911,0.56889,0.56867,0.56845,0.56823,0.56801,0.56779,0.56757,0.56736,0.56714,0.56692,0.5667,0.56648,0.56626,0.56605,0.56583,0.56561,0.56539,0.56517,0.56495,0.56474,0.56452,0.5643,0.56408,0.56387,0.56365,0.56343,0.56321,0.56299,0.56278,0.56256,0.56234,0.56212,
-0.56191,0.56169,0.56147,0.56126,0.56104,0.56082,0.5606,0.56039,0.56017,0.55995,0.55974,0.55952,0.5593,0.55909,0.55887,0.55865,0.55843,0.55822,0.558,0.55779,0.55757,0.55735,0.55714,0.55692,0.5567,0.55649,0.55627,0.55605,0.55584,0.55562,0.55541,0.55519,0.55497,0.55476,0.55454,0.55433,0.55411,0.5539,0.55368,0.55346,0.55325,0.55303,0.55282,0.5526,0.55239,0.55217,0.55196,0.55174,0.55153,0.55131,0.5511,0.55088,0.55067,0.55045,0.55024,0.55002,0.54981,0.54959,0.54938,0.54916,0.54895,0.54873,0.54852,0.5483,0.54809,0.54788,0.54766,0.54745,0.54723,0.54702,0.54681,0.54659,0.54638,0.54616,0.54595,0.54574,0.54552,0.54531,0.54509,0.54488,0.54467,0.54445,0.54424,0.54403,0.54381,0.5436,0.54339,0.54317,0.54296,0.54275,0.54253,0.54232,0.54211,0.54189,0.54168,0.54147,0.54126,0.54104,0.54083,0.54062,
-0.5404,0.54019,0.53998,0.53977,0.53955,0.53934,0.53913,0.53892,0.53871,0.53849,0.53828,0.53807,0.53786,0.53764,0.53743,0.53722,0.53701,0.5368,0.53659,0.53637,0.53616,0.53595,0.53574,0.53553,0.53532,0.5351,0.53489,0.53468,0.53447,0.53426,0.53405,0.53384,0.53363,0.53341,0.5332,0.53299,0.53278,0.53257,0.53236,0.53215,0.53194,0.53173,0.53152,0.53131,0.5311,0.53089,0.53068,0.53047,0.53026,0.53005,0.52983,0.52962,0.52941,0.5292,0.52899,0.52878,0.52858,0.52837,0.52816,0.52795,0.52774,0.52753,0.52732,0.52711,0.5269,0.52669,0.52648,0.52627,0.52606,0.52585,0.52564,0.52543,0.52522,0.52501,0.52481,0.5246,0.52439,0.52418,0.52397,0.52376,0.52355,0.52334,0.52314,0.52293,0.52272,0.52251,0.5223,0.52209,0.52189,0.52168,0.52147,0.52126,0.52105,0.52085,0.52064,0.52043,0.52022,0.52001,0.51981,0.5196,
-0.51939,0.51918,0.51898,0.51877,0.51856,0.51835,0.51815,0.51794,0.51773,0.51752,0.51732,0.51711,0.5169,0.5167,0.51649,0.51628,0.51608,0.51587,0.51566,0.51546,0.51525,0.51504,0.51484,0.51463,0.51442,0.51422,0.51401,0.5138,0.5136,0.51339,0.51319,0.51298,0.51277,0.51257,0.51236,0.51216,0.51195,0.51174,0.51154,0.51133,0.51113,0.51092,0.51072,0.51051,0.51031,0.5101,0.5099,0.50969,0.50949,0.50928,0.50907,0.50887,0.50866,0.50846,0.50826,0.50805,0.50785,0.50764,0.50744,0.50723,0.50703,0.50682,0.50662,0.50641,0.50621,0.50601,0.5058,0.5056,0.50539,0.50519,0.50498,0.50478,0.50458,0.50437,0.50417,0.50397,0.50376,0.50356,0.50335,0.50315,0.50295,0.50274,0.50254,0.50234,0.50213,0.50193,0.50173,0.50153,0.50132,0.50112,0.50092,0.50071,0.50051,0.50031,0.5001,0.4999,0.4997,0.4995,0.49929,0.49909,
-0.49889,0.49869,0.49848,0.49828,0.49808,0.49788,0.49768,0.49747,0.49727,0.49707,0.49687,0.49667,0.49646,0.49626,0.49606,0.49586,0.49566,0.49546,0.49526,0.49505,0.49485,0.49465,0.49445,0.49425,0.49405,0.49385,0.49365,0.49344,0.49324,0.49304,0.49284,0.49264,0.49244,0.49224,0.49204,0.49184,0.49164,0.49144,0.49124,0.49104,0.49084,0.49064,0.49044,0.49024,0.49004,0.48984,0.48964,0.48944,0.48924,0.48904,0.48884,0.48864,0.48844,0.48824,0.48804,0.48784,0.48764,0.48744,0.48724,0.48704,0.48684,0.48664,0.48644,0.48624,0.48605,0.48585,0.48565,0.48545,0.48525,0.48505,0.48485,0.48465,0.48446,0.48426,0.48406,0.48386,0.48366,0.48346,0.48327,0.48307,0.48287,0.48267,0.48247,0.48228,0.48208,0.48188,0.48168,0.48148,0.48129,0.48109,0.48089,0.48069,0.4805,0.4803,0.4801,0.4799,0.47971,0.47951,0.47931,0.47912,
-0.47892,0.47872,0.47852,0.47833,0.47813,0.47793,0.47774,0.47754,0.47734,0.47715,0.47695,0.47675,0.47656,0.47636,0.47617,0.47597,0.47577,0.47558,0.47538,0.47519,0.47499,0.47479,0.4746,0.4744,0.47421,0.47401,0.47381,0.47362,0.47342,0.47323,0.47303,0.47284,0.47264,0.47245,0.47225,0.47206,0.47186,0.47167,0.47147,0.47128,0.47108,0.47089,0.47069,0.4705,0.4703,0.47011,0.46991,0.46972,0.46953,0.46933,0.46914,0.46894,0.46875,0.46855,0.46836,0.46817,0.46797,0.46778,0.46758,0.46739,0.4672,0.467,0.46681,0.46662,0.46642,0.46623,0.46604,0.46584,0.46565,0.46546,0.46526,0.46507,0.46488,0.46468,0.46449,0.4643,0.4641,0.46391,0.46372,0.46353,0.46333,0.46314,0.46295,0.46276,0.46256,0.46237,0.46218,0.46199,0.46179,0.4616,0.46141,0.46122,0.46103,0.46083,0.46064,0.46045,0.46026,0.46007,0.45988,0.45968,
-0.45949,0.4593,0.45911,0.45892,0.45873,0.45854,0.45834,0.45815,0.45796,0.45777,0.45758,0.45739,0.4572,0.45701,0.45682,0.45663,0.45643,0.45624,0.45605,0.45586,0.45567,0.45548,0.45529,0.4551,0.45491,0.45472,0.45453,0.45434,0.45415,0.45396,0.45377,0.45358,0.45339,0.4532,0.45301,0.45282,0.45263,0.45244,0.45225,0.45207,0.45188,0.45169,0.4515,0.45131,0.45112,0.45093,0.45074,0.45055,0.45036,0.45017,0.44999,0.4498,0.44961,0.44942,0.44923,0.44904,0.44885,0.44867,0.44848,0.44829,0.4481,0.44791,0.44773,0.44754,0.44735,0.44716,0.44697,0.44679,0.4466,0.44641,0.44622,0.44604,0.44585,0.44566,0.44547,0.44529,0.4451,0.44491,0.44472,0.44454,0.44435,0.44416,0.44398,0.44379,0.4436,0.44342,0.44323,0.44304,0.44286,0.44267,0.44248,0.4423,0.44211,0.44192,0.44174,0.44155,0.44136,0.44118,0.44099,0.44081,
-0.44062,0.44043,0.44025,0.44006,0.43988,0.43969,0.43951,0.43932,0.43913,0.43895,0.43876,0.43858,0.43839,0.43821,0.43802,0.43784,0.43765,0.43747,0.43728,0.4371,0.43691,0.43673,0.43654,0.43636,0.43617,0.43599,0.4358,0.43562,0.43544,0.43525,0.43507,0.43488,0.4347,0.43452,0.43433,0.43415,0.43396,0.43378,0.4336,0.43341,0.43323,0.43304,0.43286,0.43268,0.43249,0.43231,0.43213,0.43194,0.43176,0.43158,0.43139,0.43121,0.43103,0.43085,0.43066,0.43048,0.4303,0.43011,0.42993,0.42975,0.42957,0.42938,0.4292,0.42902,0.42884,0.42865,0.42847,0.42829,0.42811,0.42793,0.42774,0.42756,0.42738,0.4272,0.42702,0.42683,0.42665,0.42647,0.42629,0.42611,0.42593,0.42574,0.42556,0.42538,0.4252,0.42502,0.42484,0.42466,0.42448,0.4243,0.42411,0.42393,0.42375,0.42357,0.42339,0.42321,0.42303,0.42285,0.42267,0.42249};
\ No newline at end of file
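
Note: the constant tables deleted above held precomputed (complete and incomplete) gamma-function values that the MAGSAC++ sigma-consensus code looks up by scaled residual; after this change they are produced at runtime by the GammaValues singleton instead of being shipped as source. As a rough, hedged illustration only (not the actual GammaValues implementation; the function name, trapezoidal integration, and the dof/max_range/table_size parameters are assumptions), one way to tabulate a regularized upper incomplete gamma function that starts at 1 and decays, like the n5 table above, is:

#include <cmath>
#include <vector>

// Sketch: tabulate Q(a, x) = Gamma(a, x) / Gamma(a), a = (dof - 1) / 2,
// at 'table_size' points over [0, max_range]. Intended for dof = 4 or 5,
// so a - 1 >= 0 and the integrand is finite at t = 0.
static std::vector<double> buildUpperIncompleteGammaTable(double dof, double max_range, int table_size)
{
    const double a = (dof - 1.0) / 2.0;
    std::vector<double> table(table_size);
    for (int i = 0; i < table_size; i++) {
        const double x = max_range * i / (table_size - 1);
        // crude trapezoidal integration (slow but simple); the integrand is
        // negligible beyond x + 50 for the shapes considered here
        double sum = 0.0;
        const double step = 1e-3;
        for (double t = x; t < x + 50.0; t += step)
            sum += 0.5 * step * (std::pow(t, a - 1.0) * std::exp(-t)
                               + std::pow(t + step, a - 1.0) * std::exp(-(t + step)));
        table[i] = sum / std::tgamma(a); // regularized form, equals 1 at x = 0
    }
    return table;
}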
index 3579618..b60e397 100644 (file)
@@ -18,155 +18,48 @@ public:
         points_mat(&points_), points ((float*) points_.data) {}
 
     int estimate (const std::vector<int>& sample, std::vector<Mat> &models) const override {
-        // OpenCV RHO:
-        const int smpl0 = 4*sample[0], smpl1 = 4*sample[1], smpl2 = 4*sample[2], smpl3 = 4*sample[3];
-        const auto x0 = points[smpl0], y0 = points[smpl0+1], X0 = points[smpl0+2], Y0 = points[smpl0+3];
-        const auto x1 = points[smpl1], y1 = points[smpl1+1], X1 = points[smpl1+2], Y1 = points[smpl1+3];
-        const auto x2 = points[smpl2], y2 = points[smpl2+1], X2 = points[smpl2+2], Y2 = points[smpl2+3];
-        const auto x3 = points[smpl3], y3 = points[smpl3+1], X3 = points[smpl3+2], Y3 = points[smpl3+3];
-        const double x0X0 = x0*X0, x1X1 = x1*X1, x2X2 = x2*X2, x3X3 = x3*X3;
-        const double x0Y0 = x0*Y0, x1Y1 = x1*Y1, x2Y2 = x2*Y2, x3Y3 = x3*Y3;
-        const double y0X0 = y0*X0, y1X1 = y1*X1, y2X2 = y2*X2, y3X3 = y3*X3;
-        const double y0Y0 = y0*Y0, y1Y1 = y1*Y1, y2Y2 = y2*Y2, y3Y3 = y3*Y3;
-
-        double minor[2][4] = {{x0-x2, x1-x2, x2, x3-x2},
-                              {y0-y2, y1-y2, y2, y3-y2}};
-
-        double major[3][8] = {{x2X2-x0X0, x2X2-x1X1, -x2X2, x2X2-x3X3, x2Y2-x0Y0, x2Y2-x1Y1, -x2Y2, x2Y2-x3Y3},
-                              {y2X2-y0X0, y2X2-y1X1, -y2X2, y2X2-y3X3, y2Y2-y0Y0, y2Y2-y1Y1, -y2Y2, y2Y2-y3Y3},
-                              {X0-X2    , X1-X2    , X2   , X3-X2    , Y0-Y2    , Y1-Y2    , Y2   , Y3-Y2    }};
-        /**
-         * int i;
-         * for(i=0;i<8;i++) major[2][i]=-major[2][i];
-         * Eliminate column 0 of rows 1 and 3
-         * R(1)=(x0-x2)*R(1)-(x1-x2)*R(0),     y1'=(y1-y2)(x0-x2)-(x1-x2)(y0-y2)
-         * R(3)=(x0-x2)*R(3)-(x3-x2)*R(0),     y3'=(y3-y2)(x0-x2)-(x3-x2)(y0-y2)
-         */
-
-        double scalar1=minor[0][0], scalar2=minor[0][1];
-        minor[1][1]=minor[1][1]*scalar1-minor[1][0]*scalar2;
-
-        major[0][1]=major[0][1]*scalar1-major[0][0]*scalar2;
-        major[1][1]=major[1][1]*scalar1-major[1][0]*scalar2;
-        major[2][1]=major[2][1]*scalar1-major[2][0]*scalar2;
-
-        major[0][5]=major[0][5]*scalar1-major[0][4]*scalar2;
-        major[1][5]=major[1][5]*scalar1-major[1][4]*scalar2;
-        major[2][5]=major[2][5]*scalar1-major[2][4]*scalar2;
-
-        scalar2=minor[0][3];
-        minor[1][3]=minor[1][3]*scalar1-minor[1][0]*scalar2;
-
-        major[0][3]=major[0][3]*scalar1-major[0][0]*scalar2;
-        major[1][3]=major[1][3]*scalar1-major[1][0]*scalar2;
-        major[2][3]=major[2][3]*scalar1-major[2][0]*scalar2;
-
-        major[0][7]=major[0][7]*scalar1-major[0][4]*scalar2;
-        major[1][7]=major[1][7]*scalar1-major[1][4]*scalar2;
-        major[2][7]=major[2][7]*scalar1-major[2][4]*scalar2;
-
-        /**
-         * Eliminate column 1 of rows 0 and 3
-         * R(3)=y1'*R(3)-y3'*R(1)
-         * R(0)=y1'*R(0)-(y0-y2)*R(1)
-         */
-
-        scalar1=minor[1][1];scalar2=minor[1][3];
-        major[0][3]=major[0][3]*scalar1-major[0][1]*scalar2;
-        major[1][3]=major[1][3]*scalar1-major[1][1]*scalar2;
-        major[2][3]=major[2][3]*scalar1-major[2][1]*scalar2;
-
-        major[0][7]=major[0][7]*scalar1-major[0][5]*scalar2;
-        major[1][7]=major[1][7]*scalar1-major[1][5]*scalar2;
-        major[2][7]=major[2][7]*scalar1-major[2][5]*scalar2;
-
-        scalar2=minor[1][0];
-        minor[0][0]=minor[0][0]*scalar1-minor[0][1]*scalar2;
-
-        major[0][0]=major[0][0]*scalar1-major[0][1]*scalar2;
-        major[1][0]=major[1][0]*scalar1-major[1][1]*scalar2;
-        major[2][0]=major[2][0]*scalar1-major[2][1]*scalar2;
+        int m = 8, n = 9;
+        std::vector<double> A(72, 0);
+        int cnt = 0;
+        for (int i = 0; i < 4; i++) {
+            const int smpl = 4*sample[i];
+            const double x1 = points[smpl], y1 = points[smpl+1], x2 = points[smpl+2], y2 = points[smpl+3];
+
+            A[cnt++] = -x1;
+            A[cnt++] = -y1;
+            A[cnt++] = -1;
+            cnt += 3; // skip zeros
+            A[cnt++] = x2*x1;
+            A[cnt++] = x2*y1;
+            A[cnt++] = x2;
+
+            cnt += 3;
+            A[cnt++] = -x1;
+            A[cnt++] = -y1;
+            A[cnt++] = -1;
+            A[cnt++] = y2*x1;
+            A[cnt++] = y2*y1;
+            A[cnt++] = y2;
+        }
 
-        major[0][4]=major[0][4]*scalar1-major[0][5]*scalar2;
-        major[1][4]=major[1][4]*scalar1-major[1][5]*scalar2;
-        major[2][4]=major[2][4]*scalar1-major[2][5]*scalar2;
+        if (!Math::eliminateUpperTriangular(A, m, n))
+            return 0;
 
-        /**
-         * Eliminate columns 0 and 1 of row 2
-         * R(0)/=x0'
-         * R(1)/=y1'
-         * R(2)-= (x2*R(0) + y2*R(1))
-         */
+        models = std::vector<Mat>{ Mat_<double>(3,3) };
+        auto * h = (double *) models[0].data;
+        h[8] = 1.;
 
-        scalar1=1.0f/minor[0][0];
-        major[0][0]*=scalar1;
-        major[1][0]*=scalar1;
-        major[2][0]*=scalar1;
-        major[0][4]*=scalar1;
-        major[1][4]*=scalar1;
-        major[2][4]*=scalar1;
-
-        scalar1=1.0f/minor[1][1];
-        major[0][1]*=scalar1;
-        major[1][1]*=scalar1;
-        major[2][1]*=scalar1;
-        major[0][5]*=scalar1;
-        major[1][5]*=scalar1;
-        major[2][5]*=scalar1;
-
-        scalar1=minor[0][2];scalar2=minor[1][2];
-        major[0][2]-=major[0][0]*scalar1+major[0][1]*scalar2;
-        major[1][2]-=major[1][0]*scalar1+major[1][1]*scalar2;
-        major[2][2]-=major[2][0]*scalar1+major[2][1]*scalar2;
-
-        major[0][6]-=major[0][4]*scalar1+major[0][5]*scalar2;
-        major[1][6]-=major[1][4]*scalar1+major[1][5]*scalar2;
-        major[2][6]-=major[2][4]*scalar1+major[2][5]*scalar2;
-
-        /* Only major matters now. R(3) and R(7) correspond to the hollowed-out rows. */
-        scalar1=major[0][7];
-        major[1][7]/=scalar1;
-        major[2][7]/=scalar1;
-        const double m17 = major[1][7], m27 = major[2][7];
-        scalar1=major[0][0];major[1][0]-=scalar1*m17;major[2][0]-=scalar1*m27;
-        scalar1=major[0][1];major[1][1]-=scalar1*m17;major[2][1]-=scalar1*m27;
-        scalar1=major[0][2];major[1][2]-=scalar1*m17;major[2][2]-=scalar1*m27;
-        scalar1=major[0][3];major[1][3]-=scalar1*m17;major[2][3]-=scalar1*m27;
-        scalar1=major[0][4];major[1][4]-=scalar1*m17;major[2][4]-=scalar1*m27;
-        scalar1=major[0][5];major[1][5]-=scalar1*m17;major[2][5]-=scalar1*m27;
-        scalar1=major[0][6];major[1][6]-=scalar1*m17;major[2][6]-=scalar1*m27;
-
-        /* One column left (Two in fact, but the last one is the homography) */
-        major[2][3]/=major[1][3];
-        const double m23 = major[2][3];
-
-        major[2][0]-=major[1][0]*m23;
-        major[2][1]-=major[1][1]*m23;
-        major[2][2]-=major[1][2]*m23;
-        major[2][4]-=major[1][4]*m23;
-        major[2][5]-=major[1][5]*m23;
-        major[2][6]-=major[1][6]*m23;
-        major[2][7]-=major[1][7]*m23;
-
-        // check if homography does not contain NaN values
-        for (int i = 0; i < 8; i++)
-            if (std::isnan(major[2][i])) return 0;
-
-        /* Homography is done. */
-        models = std::vector<Mat>(1, Mat_<double>(3,3));
-        auto * H_ = (double *) models[0].data;
-        H_[0]=major[2][0];
-        H_[1]=major[2][1];
-        H_[2]=major[2][2];
-
-        H_[3]=major[2][4];
-        H_[4]=major[2][5];
-        H_[5]=major[2][6];
-
-        H_[6]=major[2][7];
-        H_[7]=major[2][3];
-        H_[8]=1.0;
+        // back substitution, starting from the last row
+        for (int i = m-1; i >= 0; i--) {
+            double acc = 0;
+            for (int j = i+1; j < n; j++)
+                acc -= A[i*n+j]*h[j];
 
+            h[i] = acc / A[i*n+i];
+            // if numerical errors produced a NaN, report 0 solutions
+            if (std::isnan(h[i]))
+                return 0;
+        }
         return 1;
     }
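
For reference, the rewritten minimal solver above fills models[0] with a 3x3 double homography stored row-major, with h[8] fixed to 1 by construction. A small illustrative helper (not part of the patch) showing how such a matrix maps a point:

#include <opencv2/core.hpp>

// Map a point through a row-major 3x3 CV_64F homography (illustrative only).
static cv::Point2d applyHomography(const cv::Mat &H, const cv::Point2d &p)
{
    const double *h = H.ptr<double>();
    const double w = h[6]*p.x + h[7]*p.y + h[8]; // projective scale
    return cv::Point2d((h[0]*p.x + h[1]*p.y + h[2]) / w,
                       (h[3]*p.x + h[4]*p.y + h[5]) / w);
}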
 
@@ -280,7 +173,7 @@ public:
         Matx<double, 9, 9> Vt;
         Vec<double, 9> D;
         if (! eigen(Matx<double, 9, 9>(AtA), D, Vt)) return 0;
-        Mat H = Mat(Vt.row(8).reshape<3,3>());
+        Mat H = Mat_<double>(3, 3, Vt.val + 72/*=8*9*/);
 #endif
 
         models = std::vector<Mat>{ T2.inv() * H * T1 };
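
The non-minimal solver above denormalizes its DLT solution as T2.inv() * H * T1. A common choice for such normalization matrices (an assumption about their role here, not something stated in this diff) is Hartley normalization: translate the points to their centroid and scale them so the mean distance from the origin is sqrt(2). A minimal sketch:

#include <opencv2/core.hpp>
#include <cmath>
#include <vector>

// Sketch of a Hartley-style normalization transform (illustrative only).
static cv::Matx33d normalizationTransform(const std::vector<cv::Point2d> &pts)
{
    CV_Assert(!pts.empty());
    double mx = 0, my = 0;
    for (const auto &p : pts) { mx += p.x; my += p.y; }
    mx /= pts.size(); my /= pts.size();
    double mean_dist = 0;
    for (const auto &p : pts)
        mean_dist += std::sqrt((p.x - mx)*(p.x - mx) + (p.y - my)*(p.y - my));
    mean_dist /= pts.size();
    const double s = std::sqrt(2.0) / mean_dist;
    // maps (x, y) to (s*(x - mx), s*(y - my))
    return cv::Matx33d(s, 0, -s*mx,
                       0, s, -s*my,
                       0, 0, 1);
}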
index fa0cc8f..ff8089b 100644 (file)
@@ -5,7 +5,6 @@
 #include "../precomp.hpp"
 #include "../usac.hpp"
 #include "opencv2/imgproc/detail/gcgraph.hpp"
-#include "gamma_values.hpp"
 
 namespace cv { namespace usac {
 class GraphCutImpl : public GraphCut {
@@ -47,7 +46,7 @@ public:
 
     bool refineModel (const Mat &best_model, const Score &best_model_score,
                       Mat &new_model, Score &new_model_score) override {
-        if (best_model_score.inlier_number < gc_sample_size)
+        if (best_model_score.inlier_number < estimator->getNonMinimalSampleSize())
             return false;
 
         // improve best model by non minimal estimation
@@ -69,24 +68,12 @@ public:
                             (lo_sampler->generateUniqueRandomSubset(labeling_inliers,
                                    labeling_inliers_size), gc_sample_size, gc_models, weights);
                 } else {
-                    if (iter > 0)
-                        break; // break inliers are not updated
+                    if (iter > 0) break; // break: inliers were not updated
                     num_of_estimated_models = estimator->estimateModelNonMinimalSample
                             (labeling_inliers, labeling_inliers_size, gc_models, weights);
                 }
-                if (num_of_estimated_models == 0)
-                    break;
-
-                bool zero_inliers = false;
                 for (int model_idx = 0; model_idx < num_of_estimated_models; model_idx++) {
-                    Score gc_temp_score = quality->getScore(gc_models[model_idx]);
-                    if (gc_temp_score.inlier_number == 0){
-                        zero_inliers = true; break;
-                    }
-
-                    if (best_model_score.isBetter(gc_temp_score))
-                        continue;
-
+                    const Score gc_temp_score = quality->getScore(gc_models[model_idx]);
                     // store the best model from estimated models
                     if (gc_temp_score.isBetter(new_model_score)) {
                         is_best_model_updated = true;
@@ -94,9 +81,6 @@ public:
                         gc_models[model_idx].copyTo(new_model);
                     }
                 }
-
-                if (zero_inliers)
-                    break;
             } // end of inner GC local optimization
         } // end of while loop
 
@@ -119,10 +103,8 @@ private:
         // Estimate the vertex capacities
         for (int pt = 0; pt < points_size; pt++) {
             tmp_squared_distance = errors[pt];
-            if (std::isnan(tmp_squared_distance)) {
-                energies[pt] = std::numeric_limits<float>::max();
-                continue;
-            }
+            if (std::isnan(tmp_squared_distance))
+                tmp_squared_distance = std::numeric_limits<float>::max();
             energy = tmp_squared_distance / sqr_trunc_thr; // Truncated quadratic cost
 
             if (tmp_squared_distance <= sqr_trunc_thr)
@@ -130,8 +112,7 @@ private:
             else
                 graph.addTermWeights(pt, one_minus_lambda * energy, 0);
 
-            if (energy > 1) energy = 1;
-            energies[pt] = energy;
+            energies[pt] = energy > 1 ? 1 : energy;
         }
 
         std::fill(used_edges.begin(), used_edges.end(), false);
@@ -184,7 +165,7 @@ private:
     }
     Ptr<LocalOptimization> clone(int state) const override {
         return makePtr<GraphCutImpl>(estimator->clone(), error->clone(), quality->clone(),
-                neighborhood_graph,lo_sampler->clone(state), sqrt(sqr_trunc_thr / 2),
+                neighborhood_graph,lo_sampler->clone(state), sqr_trunc_thr / 2.25,
                 spatial_coherence, lo_inner_iterations);
     }
 };
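
The vertex-capacity code above uses a truncated quadratic point cost: the squared residual is divided by the squared truncation threshold and clamped to 1. A one-function restatement mirroring the variable names in the hunk (illustrative, not part of the patch):

// Truncated quadratic cost used for the graph-cut unary terms (sketch).
static double truncatedQuadraticCost(double sqr_residual, double sqr_trunc_thr)
{
    const double energy = sqr_residual / sqr_trunc_thr;
    return energy > 1.0 ? 1.0 : energy; // clamp beyond the truncation threshold
}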
@@ -257,12 +238,11 @@ public:
      */
     bool refineModel (const Mat &so_far_the_best_model, const Score &best_model_score,
                       Mat &new_model, Score &new_model_score) override {
-        if (best_model_score.inlier_number < lo_sample_size)
+        if (best_model_score.inlier_number < estimator->getNonMinimalSampleSize())
             return false;
 
         so_far_the_best_model.copyTo(new_model);
         new_model_score = best_model_score;
-
         // get inliers from so far the best model.
         int num_inliers_of_best_model = quality->getInliers(so_far_the_best_model,
                                                            inliers_of_best_model);
@@ -276,7 +256,6 @@ public:
                 num_estimated_models = estimator->estimateModelNonMinimalSample
                         (lo_sampler->generateUniqueRandomSubset(inliers_of_best_model,
                                 num_inliers_of_best_model), lo_sample_size, lo_models, weights);
-                if (num_estimated_models == 0) continue;
             } else {
                 // if the model was not updated in the first iteration, break.
                 if (iters > 0) break;
@@ -284,12 +263,11 @@ public:
                 // if it fails -> end Lo.
                 num_estimated_models = estimator->estimateModelNonMinimalSample
                     (inliers_of_best_model, num_inliers_of_best_model, lo_models, weights);
-                if (num_estimated_models == 0) return false;
             }
 
             //////// Choose the best lo_model from estimated lo_models.
             for (int model_idx = 0; model_idx < num_estimated_models; model_idx++) {
-                Score temp_score = quality->getScore(lo_models[model_idx]);
+                const Score temp_score = quality->getScore(lo_models[model_idx]);
                 if (temp_score.isBetter(new_model_score)) {
                     new_model_score = temp_score;
                     lo_models[model_idx].copyTo(new_model);
@@ -323,26 +301,24 @@ public:
                     if (num_estimated_models == 0) break;
 
                     // Get score and update virtual inliers with current threshold
-                    //////// Choose the best lo_iter_model from estimated lo_iter_models.
+                    ////// Choose the best lo_iter_model from estimated lo_iter_models.
                     lo_iter_models[0].copyTo(lo_iter_model);
                     lo_iter_score = quality->getScore(lo_iter_model);
                     for (int model_idx = 1; model_idx < num_estimated_models; model_idx++) {
-                        Score temp_score = quality->getScore(lo_iter_models[model_idx]);
+                        const Score temp_score = quality->getScore(lo_iter_models[model_idx]);
                         if (temp_score.isBetter(lo_iter_score)) {
                             lo_iter_score = temp_score;
                             lo_iter_models[model_idx].copyTo(lo_iter_model);
                         }
                     }
 
-                    virtual_inliers_size = quality->getInliers(lo_iter_model, virtual_inliers, lo_threshold);
+                    if (iterations != lo_iter_max_iterations-1)
+                        virtual_inliers_size = quality->getInliers(lo_iter_model, virtual_inliers, lo_threshold);
                 }
-                if (fabs (lo_threshold - threshold) < FLT_EPSILON) {
-                    // Success, threshold does not differ
-                    // last score correspond to user-defined threshold. Inliers are real.
-                    if (lo_iter_score.isBetter(new_model_score)) {
-                        new_model_score = lo_iter_score;
-                        lo_iter_model.copyTo(new_model);
-                    }
+
+                if (lo_iter_score.isBetter(new_model_score)) {
+                    new_model_score = lo_iter_score;
+                    lo_iter_model.copyTo(new_model);
                 }
             }
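
The iterative part above refits on "virtual" inliers while lo_threshold is gradually tightened toward the user-defined threshold. A hedged sketch of one possible linear schedule (the multiplier, step, and function name are assumptions, not values taken from this diff):

#include <algorithm>
#include <vector>

// Sketch: inflated-to-user threshold schedule for iterative local optimization.
static std::vector<double> thresholdSchedule(double user_thr, double multiplier, int iters)
{
    std::vector<double> thrs(std::max(iters, 1));
    double thr = user_thr * multiplier;                           // start inflated
    const double step = (thr - user_thr) / std::max(iters - 1, 1);
    for (size_t i = 0; i < thrs.size(); ++i) { thrs[i] = thr; thr -= step; } // shrink linearly
    return thrs;
}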
 
@@ -375,6 +351,7 @@ private:
     const Ptr<Quality> quality;
     const Ptr<Error> error;
     const Ptr<ModelVerifier> verifier;
+    const GammaValues& gamma_generator;
     // The degrees of freedom of the data from which the model is estimated.
     // E.g., for models coming from point correspondences (x1,y1,x2,y2), it is 4.
     const int degrees_of_freedom;
@@ -394,25 +371,26 @@ private:
     double C_times_two_ad_dof;
     // The gamma value of (DoF - 1) / 2 is used for the estimation; since it is
     // constant, it is better to calculate it a priori.
-    double gamma_value, squared_sigma_max_2, one_over_sigma;
+    double squared_sigma_max_2, one_over_sigma;
     // Calculating the upper incomplete gamma value of (DoF - 1) / 2 with k^2 / 2.
     const double gamma_k;
     // The lower incomplete gamma value of (DoF - 1) / 2 is used for the estimation;
     // since it is constant, it is better to calculate it a priori.
-    double gamma_difference;
+    double max_sigma_sqr;
     const int points_size, number_of_irwls_iters;
     const double maximum_threshold, max_sigma;
 
-    std::vector<double> residuals, sigma_weights, stored_gamma_values;
-    std::vector<int> residuals_idxs;
+    std::vector<double> sqr_residuals, sigma_weights;
+    std::vector<int> sqr_residuals_idxs;
     // Models fit by weighted least-squares fitting
     std::vector<Mat> sigma_models;
     // Points used in the weighted least-squares fitting
     std::vector<int> sigma_inliers;
     // Weights used in the weighted least-squares fitting
-    int max_lo_sample_size;
+    int max_lo_sample_size, stored_gamma_number_min1;
     double scale_of_stored_gammas;
     RNG rng;
+    const std::vector<double> &stored_gamma_values;
 public:
 
     SigmaConsensusImpl (const Ptr<Estimator> &estimator_, const Ptr<Error> &error_,
@@ -420,103 +398,93 @@ public:
         int max_lo_sample_size_, int number_of_irwls_iters_, int DoF,
         double sigma_quantile, double upper_incomplete_of_sigma_quantile, double C_,
         double maximum_thr) : estimator (estimator_), quality(quality_),
-          error (error_), verifier(verifier_), degrees_of_freedom(DoF),
-          k (sigma_quantile), C(C_), sample_size(estimator_->getMinimalSampleSize()),
+          error (error_), verifier(verifier_),
+          gamma_generator(GammaValues::getSingleton()),
+          degrees_of_freedom(DoF), k (sigma_quantile), C(C_),
+          sample_size(estimator_->getMinimalSampleSize()),
           gamma_k (upper_incomplete_of_sigma_quantile), points_size (quality_->getPointsSize()),
           number_of_irwls_iters (number_of_irwls_iters_),
-          maximum_threshold(maximum_thr), max_sigma (maximum_thr) {
-
+          maximum_threshold(maximum_thr), max_sigma (maximum_thr),
+          stored_gamma_values(gamma_generator.getGammaValues())
+    {
         dof_minus_one_per_two = (degrees_of_freedom - 1.0) / 2.0;
         two_ad_dof = std::pow(2.0, dof_minus_one_per_two);
         C_times_two_ad_dof = C * two_ad_dof;
-        gamma_value = tgamma(dof_minus_one_per_two);
-        gamma_difference = gamma_value - gamma_k;
         // Calculate 2 * \sigma_{max}^2 a priori
         squared_sigma_max_2 = max_sigma * max_sigma * 2.0;
         // Divide C * 2^(DoF - 1) by \sigma_{max} a priori
         one_over_sigma = C_times_two_ad_dof / max_sigma;
-
-        residuals = std::vector<double>(points_size);
-        residuals_idxs = std::vector<int>(points_size);
+        max_sigma_sqr = squared_sigma_max_2 * 0.5;
+        sqr_residuals = std::vector<double>(points_size);
+        sqr_residuals_idxs = std::vector<int>(points_size);
         sigma_inliers = std::vector<int>(points_size);
         max_lo_sample_size = max_lo_sample_size_;
         sigma_weights = std::vector<double>(points_size);
         sigma_models = std::vector<Mat>(estimator->getMaxNumSolutionsNonMinimal());
-
-        if (DoF == 4) {
-            scale_of_stored_gammas = scale_of_stored_gammas_n4;
-            stored_gamma_values = std::vector<double>(stored_gamma_values_n4,
-                    stored_gamma_values_n4+stored_gamma_number+1);
-        } else if (DoF == 5) {
-            scale_of_stored_gammas = scale_of_stored_gammas_n5;
-            stored_gamma_values = std::vector<double>(stored_gamma_values_n5,
-                    stored_gamma_values_n5+stored_gamma_number+1);
-        } else
-            CV_Error(cv::Error::StsNotImplemented, "Sigma values are not generated");
+        stored_gamma_number_min1 = gamma_generator.getTableSize()-1;
+        scale_of_stored_gammas = gamma_generator.getScaleOfGammaValues();
     }
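For orientation, a tiny worked example of the constants precomputed in the constructor above. The numbers are illustrative only and assume the MAGSAC defaults introduced later in this patch (DoF = 2, C = 0.5, maximum threshold 7.5); a given configuration may use different values.

    #include <cmath>
    #include <cstdio>

    int main() {
        // illustrative values, assuming DoF = 2, C = 0.5, max_sigma = 7.5
        const double DoF = 2, C = 0.5, max_sigma = 7.5;
        const double dof_minus_one_per_two = (DoF - 1.0) / 2.0;                    // 0.5
        const double two_ad_dof            = std::pow(2.0, dof_minus_one_per_two); // ~1.414
        const double C_times_two_ad_dof    = C * two_ad_dof;                       // ~0.707
        const double squared_sigma_max_2   = max_sigma * max_sigma * 2.0;          // 112.5
        const double one_over_sigma        = C_times_two_ad_dof / max_sigma;       // ~0.0943
        const double max_sigma_sqr         = squared_sigma_max_2 * 0.5;            // 56.25
        std::printf("%g %g %g\n", one_over_sigma, squared_sigma_max_2, max_sigma_sqr);
        return 0;
    }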
 
     // https://github.com/danini/magsac
-    bool refineModel (const Mat &in_model, const Score &in_model_score,
+    bool refineModel (const Mat &in_model, const Score &best_model_score,
                       Mat &new_model, Score &new_model_score) override {
         int residual_cnt = 0;
 
-         if (verifier->isModelGood(in_model)) {
-             if (verifier->hasErrors()) {
-                 const std::vector<float> &errors = verifier->getErrors();
-                 for (int point_idx = 0; point_idx < points_size; ++point_idx) {
-                     // Calculate the residual of the current point
-                     const auto residual = sqrtf(errors[point_idx]);
-                     if (max_sigma > residual) {
-                         // Store the residual of the current point and its index
-                         residuals[residual_cnt] = residual;
-                         residuals_idxs[residual_cnt++] = point_idx;
-                     }
-
-                     // Interrupt if there is no chance of being better
-                     if (residual_cnt + points_size - point_idx < in_model_score.inlier_number)
-                         return false;
-                 }
-             } else {
+        if (verifier->isModelGood(in_model)) {
+            if (verifier->hasErrors()) {
+                const std::vector<float> &errors = verifier->getErrors();
+                for (int point_idx = 0; point_idx < points_size; ++point_idx) {
+                    // Calculate the residual of the current point
+                    const auto residual = sqrtf(errors[point_idx]);
+                    if (max_sigma > residual) {
+                        // Store the residual of the current point and its index
+                        sqr_residuals[residual_cnt] = residual;
+                        sqr_residuals_idxs[residual_cnt++] = point_idx;
+                    }
+
+                    // Interrupt if there is no chance of being better
+                    if (residual_cnt + points_size - point_idx < best_model_score.inlier_number)
+                        return false;
+                }
+            } else {
                 error->setModelParameters(in_model);
 
                 for (int point_idx = 0; point_idx < points_size; ++point_idx) {
-                    const double residual = sqrtf(error->getError(point_idx));
-                    if (max_sigma > residual) {
+                    const double sqr_residual = error->getError(point_idx);
+                    if (sqr_residual < max_sigma_sqr) {
                         // Store the residual of the current point and its index
-                        residuals[residual_cnt] = residual;
-                        residuals_idxs[residual_cnt++] = point_idx;
+                        sqr_residuals[residual_cnt] = sqr_residual;
+                        sqr_residuals_idxs[residual_cnt++] = point_idx;
                     }
 
-                    if (residual_cnt + points_size - point_idx < in_model_score.inlier_number)
+                    if (residual_cnt + points_size - point_idx < best_model_score.inlier_number)
                         return false;
                 }
-             }
-         } else return false;
+            }
+        } else return false;
 
-        // Initialize the polished model with the initial one
-        Mat polished_model;
-        in_model.copyTo(polished_model);
-        // A flag to determine if the initial model has been updated
-        bool updated = false;
+        in_model.copyTo(new_model);
+        new_model_score = Score();
 
         // Do the iteratively re-weighted least squares fitting
-        for (int iterations = 0; iterations < number_of_irwls_iters; ++iterations) {
+        for (int iterations = 0; iterations < number_of_irwls_iters; iterations++) {
             int sigma_inliers_cnt = 0;
             // If the current iteration is not the first, the set of possible inliers
             // (i.e., points closer than the maximum threshold) has to be recalculated.
             if (iterations > 0) {
-                error->setModelParameters(polished_model);
+                error->setModelParameters(new_model);
                 // Remove everything from the residual vector
                 residual_cnt = 0;
 
                 // Collect the points which are closer than the maximum threshold
                 for (int point_idx = 0; point_idx < points_size; ++point_idx) {
                     // Calculate the residual of the current point
-                    const double residual = error->getError(point_idx);
-                    if (residual < max_sigma) {
+                    const double sqr_residual = error->getError(point_idx);
+                    if (sqr_residual < max_sigma_sqr) {
                         // Store the residual of the current point and its index
-                        residuals[residual_cnt] = residual;
-                        residuals_idxs[residual_cnt++] = point_idx;
+                        sqr_residuals[residual_cnt] = sqr_residual;
+                        sqr_residuals_idxs[residual_cnt++] = point_idx;
                     }
                 }
                 sigma_inliers_cnt = 0;
@@ -524,54 +492,39 @@ public:
 
             // Calculate the weight of each point
             for (int i = 0; i < residual_cnt; i++) {
-                const double residual = residuals[i];
-                const int idx = residuals_idxs[i];
-                // If the residual is ~0, the point fits perfectly and it is handled differently
-                if (residual > std::numeric_limits<double>::epsilon()) {
-                    // Calculate the squared residual
-                    const double squared_residual = residual * residual;
-                    // Get the position of the gamma value in the lookup table
-                    int x = (int)round(scale_of_stored_gammas * squared_residual
-                            / squared_sigma_max_2);
-
-                    // If the sought gamma value is not stored in the lookup, return the closest element
-                    if (x >= stored_gamma_number || x < 0 /*overflow*/) // actual number of gamma values is 1 more, so >=
-                        x  = stored_gamma_number;
-
-                    sigma_inliers[sigma_inliers_cnt] = idx; // store index of point for LSQ
-                    sigma_weights[sigma_inliers_cnt++] = one_over_sigma * (stored_gamma_values[x] - gamma_k);
-                }
+                // Get the position of the gamma value in the lookup table
+                int x = (int)round(scale_of_stored_gammas * sqr_residuals[i]
+                        / squared_sigma_max_2);
+
+                // If the sought gamma value is not stored in the lookup, return the closest element
+                if (x >= stored_gamma_number_min1 || x < 0 /*overflow*/) // actual number of gamma values is 1 more, so >=
+                    x  = stored_gamma_number_min1;
+
+                sigma_inliers[sigma_inliers_cnt] = sqr_residuals_idxs[i]; // store index of point for LSQ
+                sigma_weights[sigma_inliers_cnt++] = one_over_sigma * (stored_gamma_values[x] - gamma_k);
             }
 
+            // randomly shuffle sigma inliers together with their weights
             if (sigma_inliers_cnt > max_lo_sample_size)
                 for (int i = sigma_inliers_cnt-1; i > 0; i--) {
                     const int idx = rng.uniform(0, i+1);
                     std::swap(sigma_inliers[i], sigma_inliers[idx]);
                     std::swap(sigma_weights[i], sigma_weights[idx]);
                 }
-            int num_est_models = estimator->estimateModelNonMinimalSample
+            const int num_est_models = estimator->estimateModelNonMinimalSample
                   (sigma_inliers, std::min(max_lo_sample_size, sigma_inliers_cnt),
                           sigma_models, sigma_weights);
 
-            // If there are fewer than the minimum point close to the model, terminate.
-            // Estimate the model parameters using weighted least-squares fitting
-            if (num_est_models == 0) {
-                // If the estimation failed and the iteration was never successfull,
-                // terminate with failure.
-                if (iterations == 0)
-                    return false;
-                // Otherwise, if the iteration was successfull at least one,
-                // simply break it.
-                break;
-            }
+            if (num_est_models == 0)
+                break; // break iterations
 
             // Update the model parameters
-            polished_model = sigma_models[0];
+            Mat polished_model = sigma_models[0];
             if (num_est_models > 1) {
                 // find best over other models
                 Score sigma_best_score = quality->getScore(polished_model);
                 for (int m = 1; m < num_est_models; m++) {
-                    Score sc = quality->getScore(sigma_models[m]);
+                    const Score sc = quality->getScore(sigma_models[m]);
                     if (sc.isBetter(sigma_best_score)) {
                         polished_model = sigma_models[m];
                         sigma_best_score = sc;
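The weighting loop at the top of this hunk maps each squared residual to an index into the shared gamma lookup table and turns it into a MAGSAC++ IRLS weight. A minimal sketch of that mapping as a free function; the parameters stand in for the class members above (stored_gamma_values, scale_of_stored_gammas, squared_sigma_max_2, one_over_sigma, gamma_k) and the clamping mirrors the stored_gamma_number_min1 bound:

    #include <cmath>
    #include <vector>

    // Sketch: squared residual -> MAGSAC++ weight via a precomputed gamma table.
    static double sigmaWeightSketch(double sqr_residual,
                                    const std::vector<double> &gamma_table, // stored_gamma_values
                                    double scale,               // scale_of_stored_gammas
                                    double squared_sigma_max_2, // 2 * sigma_max^2
                                    double one_over_sigma,      // C * 2^((DoF-1)/2) / sigma_max
                                    double gamma_k)             // upper incomplete gamma at k^2/2
    {
        // position of the gamma value in the lookup table
        int x = (int)std::round(scale * sqr_residual / squared_sigma_max_2);
        // if the sought value lies outside the table (or the index overflowed),
        // fall back to the closest stored element
        const int last = (int)gamma_table.size() - 1;
        if (x > last || x < 0)
            x = last;
        return one_over_sigma * (gamma_table[x] - gamma_k);
    }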
@@ -579,21 +532,25 @@ public:
                 }
             }
 
-            // The model has been updated
-            updated = true;
+            const Score polished_model_score = quality->getScore(polished_model);
+            if (polished_model_score.isBetter(new_model_score)){
+                new_model_score = polished_model_score;
+                polished_model.copyTo(new_model);
+            }
         }
 
-        if (updated) {
-            new_model_score = quality->getScore(polished_model);
-            new_model = polished_model;
-            return true;
+        const Score in_model_score = quality->getScore(in_model);
+        if (in_model_score.isBetter(new_model_score)) {
+            new_model_score = in_model_score;
+            in_model.copyTo(new_model);
         }
-        return false;
+
+        return true;
     }
     Ptr<LocalOptimization> clone(int state) const override {
         return makePtr<SigmaConsensusImpl>(estimator->clone(), error->clone(), quality->clone(),
-                verifier->clone(state), max_lo_sample_size, number_of_irwls_iters,
-                degrees_of_freedom, k, gamma_k, C, maximum_threshold);
+                verifier->clone(state), max_lo_sample_size,
+                number_of_irwls_iters, degrees_of_freedom, k, gamma_k, C, maximum_threshold);
     }
 };
 Ptr<SigmaConsensus>
@@ -602,9 +559,9 @@ SigmaConsensus::create(const Ptr<Estimator> &estimator_, const Ptr<Error> &error
         int max_lo_sample_size, int number_of_irwls_iters_, int DoF,
         double sigma_quantile, double upper_incomplete_of_sigma_quantile, double C_,
         double maximum_thr) {
-    return makePtr<SigmaConsensusImpl>(estimator_, error_, quality, verifier_, max_lo_sample_size,
-            number_of_irwls_iters_, DoF, sigma_quantile, upper_incomplete_of_sigma_quantile,
-            C_, maximum_thr);
+    return makePtr<SigmaConsensusImpl>(estimator_, error_, quality, verifier_,
+            max_lo_sample_size, number_of_irwls_iters_, DoF, sigma_quantile,
+            upper_incomplete_of_sigma_quantile, C_, maximum_thr);
 }
 
 /////////////////////////////////////////// FINAL MODEL POLISHER ////////////////////////
@@ -612,7 +569,6 @@ class LeastSquaresPolishingImpl : public LeastSquaresPolishing {
 private:
     const Ptr<Estimator> estimator;
     const Ptr<Quality> quality;
-    Score score;
     int lsq_iterations;
     std::vector<int> inliers;
     std::vector<Mat> models;
@@ -646,8 +602,7 @@ public:
             const int num_models = estimator->estimateModelNonMinimalSample(inliers,
                                                       inlier_number, models, weights);
             for (int model_idx = 0; model_idx < num_models; model_idx++) {
-                score = quality->getScore(models[model_idx]);
-
+                const Score score = quality->getScore(models[model_idx]);
                 if (best_model_score.isBetter(score))
                     continue;
                 if (score.isBetter(out_score)) {
index 99f344c..e095eef 100644 (file)
@@ -71,7 +71,7 @@ public:
     */
 
     int estimate (const std::vector<int> &sample, std::vector<Mat> &models) const override {
-        std::vector<double> A1 (5*12, 0), A2(7*8, 0);
+        std::vector<double> A1 (60, 0), A2(56, 0); // 5x12, 7x8
 
         int cnt1 = 0, cnt2 = 0;
         for (int i = 0; i < 6; i++) {
@@ -100,6 +100,7 @@ public:
             A2[cnt2++] = -v * Z;
             A2[cnt2++] = -v;
         }
+        // matrix is sparse -> do not test for singularity
         Math::eliminateUpperTriangular(A1, 5, 12);
 
         int offset = 4*12;
@@ -107,7 +108,9 @@ public:
         for (int i = 0; i < 8; i++)
             A2[cnt2++] = A1[offset + i + 4/* skip 4 first cols*/];
 
-        Math::eliminateUpperTriangular(A2, 7, 8);
+        // must be full-rank
+        if (!Math::eliminateUpperTriangular(A2, 7, 8))
+            return 0;
         // the scale is fixed to 1. In general the projection matrix is defined only up to scale.
         // P = alpha * P^, alpha = 1 / P^_[3,4]
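The early return above relies on the elimination reporting failure for a rank-deficient system. As a rough illustration of what such a routine can look like (this is an assumption for illustration, not the actual implementation of Math::eliminateUpperTriangular): row-major Gaussian elimination with partial pivoting that returns false when no usable pivot remains.

    #include <algorithm>
    #include <cmath>
    #include <vector>

    // Sketch (assumption): reduce a rows x cols row-major matrix to upper-triangular
    // form with partial pivoting; report failure when a pivot is numerically zero.
    static bool eliminateUpperTriangularSketch(std::vector<double> &a, int rows, int cols) {
        for (int r = 0; r < rows; r++) {
            // choose the row with the largest absolute entry in column r as pivot
            int pivot = r;
            for (int i = r + 1; i < rows; i++)
                if (std::fabs(a[i * cols + r]) > std::fabs(a[pivot * cols + r]))
                    pivot = i;
            if (std::fabs(a[pivot * cols + r]) < 1e-12)
                return false; // rank-deficient: no valid pivot in this column
            if (pivot != r)
                for (int c = r; c < cols; c++)
                    std::swap(a[r * cols + c], a[pivot * cols + c]);
            // eliminate the entries below the pivot
            for (int i = r + 1; i < rows; i++) {
                const double coef = a[i * cols + r] / a[r * cols + r];
                for (int c = r; c < cols; c++)
                    a[i * cols + c] -= coef * a[r * cols + c];
            }
        }
        return true;
    }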
 
index 8e438e7..856dbb5 100644 (file)
@@ -4,7 +4,6 @@
 
 #include "../precomp.hpp"
 #include "../usac.hpp"
-#include "gamma_values.hpp"
 
 namespace cv { namespace usac {
 int Quality::getInliers(const Ptr<Error> &error, const Mat &model, std::vector<int> &inliers, double threshold) {
@@ -79,11 +78,13 @@ protected:
     const Ptr<Error> error;
     const int points_size;
     const double threshold;
-    double best_score;
+    double best_score, norm_thr, one_over_thr;
 public:
     MsacQualityImpl (int points_size_, double threshold_, const Ptr<Error> &error_)
             : error (error_), points_size (points_size_), threshold (threshold_) {
         best_score = std::numeric_limits<double>::max();
+        norm_thr = threshold*9/4;
+        one_over_thr = 1 / norm_thr;
     }
 
     inline Score getScore (const Mat &model) const override {
@@ -92,12 +93,12 @@ public:
         int inlier_number = 0;
         for (int point = 0; point < points_size; point++) {
             err = error->getError(point);
-            if (err < threshold) {
-                sum_errors += err;
-                inlier_number++;
-            } else
-                sum_errors += threshold;
-            if (sum_errors > best_score)
+            if (err < norm_thr) {
+                sum_errors -= (1 - err * one_over_thr);
+                if (err < threshold)
+                    inlier_number++;
+            }
+            if (sum_errors - points_size + point > best_score)
                 break;
         }
         return Score(inlier_number, sum_errors);
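Worth spelling out: after this change every point contributes a value in [-1, 0] to sum_errors (an exact fit subtracts 1, a point at the truncation bound subtracts roughly 0, anything beyond it contributes nothing), so lower scores are better and sum_errors - points_size + point is a valid lower bound on the final score, which is what justifies the early break. The 9/4 factor appears to place the truncation at 2.25x the (squared) inlier threshold, i.e. 1.5x the threshold in residual terms. A small self-contained sketch of the same scoring:

    #include <vector>

    // Sketch: normalized truncated (MSAC-style) score over squared residuals.
    // Lower is better; each residual contributes a value in [-1, 0].
    static double msacScoreSketch(const std::vector<double> &sqr_errors,
                                  double threshold,   // squared inlier threshold
                                  double best_score)  // best (lowest) score so far
    {
        const double norm_thr = threshold * 9 / 4, one_over_thr = 1 / norm_thr;
        const int points_size = (int)sqr_errors.size();
        double sum_errors = 0;
        for (int point = 0; point < points_size; point++) {
            const double err = sqr_errors[point];
            if (err < norm_thr)
                sum_errors -= (1 - err * one_over_thr);
            // remaining points cannot push the score below this bound
            if (sum_errors - points_size + point > best_score)
                break;
        }
        return sum_errors;
    }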
@@ -127,17 +128,16 @@ Ptr<MsacQuality> MsacQuality::create(int points_size_, double threshold_,
 class MagsacQualityImpl : public MagsacQuality {
 private:
     const Ptr<Error> error;
+    const GammaValues& gamma_generator;
     const int points_size;
 
     // for example, maximum standard deviation of noise.
-    const double maximum_threshold, tentative_inlier_threshold;
+    const double maximum_threshold_sqr, tentative_inlier_threshold;
     // The degrees of freedom of the data from which the model is estimated.
     // E.g., for models coming from point correspondences (x1,y1,x2,y2), it is 4.
     const int degrees_of_freedom;
     // A 0.99 quantile of the Chi^2-distribution to convert sigma values to residuals
     const double k;
-    // A multiplier to convert residual values to sigmas
-    float threshold_to_sigma_multiplier;
     // Calculating k^2 / 2 which will be used for the estimation and,
     // due to being constant, it is better to calculate it a priori.
     double squared_k_per_2;
@@ -167,54 +167,57 @@ private:
     float maximum_sigma_2_per_2;
     // Calculate 2 * \sigma_{max}^2
     float maximum_sigma_2_times_2;
-    // Calculate the loss implied by an outlier
-    double outlier_loss;
     // Calculating 2^(DoF + 1) / \sigma_{max} which will be used for the estimation and,
     // due to being constant, it is better to calculate it a priori.
     double two_ad_dof_plus_one_per_maximum_sigma;
     double scale_of_stored_incomplete_gammas;
-    std::vector<double> stored_complete_gamma_values, stored_lower_incomplete_gamma_values;
+    double max_loss;
+    const std::vector<double> &stored_complete_gamma_values, &stored_lower_incomplete_gamma_values;
+    int stored_incomplete_gamma_number_min1;
 public:
 
     MagsacQualityImpl (double maximum_thr, int points_size_, const Ptr<Error> &error_,
                        double tentative_inlier_threshold_, int DoF, double sigma_quantile,
                        double upper_incomplete_of_sigma_quantile,
                        double lower_incomplete_of_sigma_quantile, double C_)
-            : error (error_), points_size(points_size_), maximum_threshold(maximum_thr),
+            : error (error_), gamma_generator(GammaValues::getSingleton()), points_size(points_size_),
+            maximum_threshold_sqr(maximum_thr*maximum_thr),
             tentative_inlier_threshold(tentative_inlier_threshold_), degrees_of_freedom(DoF),
             k(sigma_quantile), C(C_), gamma_value_of_k (upper_incomplete_of_sigma_quantile),
-            lower_gamma_value_of_k (lower_incomplete_of_sigma_quantile) {
+            lower_gamma_value_of_k (lower_incomplete_of_sigma_quantile),
+            stored_complete_gamma_values(gamma_generator.getCompleteGammaValues()),
+            stored_lower_incomplete_gamma_values(gamma_generator.getIncompleteGammaValues())
+    {
         previous_best_loss = std::numeric_limits<double>::max();
-        threshold_to_sigma_multiplier = 1.f / (float)k;
         squared_k_per_2 = k * k / 2.0;
         dof_minus_one_per_two = (degrees_of_freedom - 1.0) / 2.0;
         dof_plus_one_per_two = (degrees_of_freedom + 1.0) / 2.0;
         two_ad_dof_minus_one = std::pow(2.0, dof_minus_one_per_two);
         two_ad_dof_plus_one = std::pow(2.0, dof_plus_one_per_two);
-        maximum_sigma = threshold_to_sigma_multiplier * (float)maximum_threshold;
+        maximum_sigma = (float)sqrt(maximum_threshold_sqr) / (float) k;
         maximum_sigma_2 = maximum_sigma * maximum_sigma;
         maximum_sigma_2_per_2 = maximum_sigma_2 / 2.f;
         maximum_sigma_2_times_2 = maximum_sigma_2 * 2.f;
-        // penalization for outlier
-        outlier_loss = 10 * maximum_sigma * two_ad_dof_minus_one  * lower_gamma_value_of_k;
         two_ad_dof_plus_one_per_maximum_sigma = two_ad_dof_plus_one / maximum_sigma;
-
-        if (DoF == 4) {
-            scale_of_stored_incomplete_gammas = scale_of_stored_incomplete_gammas_n4;
-            stored_complete_gamma_values = std::vector<double>(stored_complete_gamma_values_n4,
-                      stored_complete_gamma_values_n4+stored_incomplete_gamma_number+1);
-            stored_lower_incomplete_gamma_values = std::vector<double>
-                    (stored_lower_incomplete_gamma_values_n4,
-                     stored_lower_incomplete_gamma_values_n4+stored_incomplete_gamma_number+1);
-        } else if (DoF == 5) {
-            scale_of_stored_incomplete_gammas = scale_of_stored_incomplete_gammas_n5;
-            stored_complete_gamma_values = std::vector<double>(stored_complete_gamma_values_n5,
-                 stored_complete_gamma_values_n5+stored_incomplete_gamma_number+1);
-            stored_lower_incomplete_gamma_values = std::vector<double>
-                    (stored_lower_incomplete_gamma_values_n5,
-                     stored_lower_incomplete_gamma_values_n5+stored_incomplete_gamma_number+1);
-        } else
-            CV_Error(cv::Error::StsNotImplemented, "Sigma values are not generated");
+        scale_of_stored_incomplete_gammas = gamma_generator.getScaleOfGammaCompleteValues();
+        stored_incomplete_gamma_number_min1 = gamma_generator.getTableSize()-1;
+        max_loss = 1e-10;
+        // The MAGSAC loss does not necessarily attain its maximum / minimum at the extreme residuals,
+        // so sample ~30 evenly spaced squared residuals to find the maximum loss
+        const double step = maximum_threshold_sqr / 30;
+        double sqr_res = 0;
+        while (sqr_res < maximum_threshold_sqr) {
+            int x=(int)round(scale_of_stored_incomplete_gammas * sqr_res
+                        / maximum_sigma_2_times_2);
+            if (x >= stored_incomplete_gamma_number_min1 || x < 0 /*overflow*/)
+                x  = stored_incomplete_gamma_number_min1;
+            const double loss = two_ad_dof_plus_one_per_maximum_sigma * (maximum_sigma_2_per_2 *
+                    stored_lower_incomplete_gamma_values[x] + sqr_res * 0.25 *
+                    (stored_complete_gamma_values[x] - gamma_value_of_k));
+            if (max_loss < loss)
+                max_loss = loss;
+            sqr_res += step;
+        }
     }
 
     // https://github.com/danini/magsac
@@ -226,20 +229,20 @@ public:
             const float squared_residual = error->getError(point_idx);
             if (squared_residual < tentative_inlier_threshold)
                 num_tentative_inliers++;
-            if (squared_residual < maximum_threshold) { // consider point as inlier
+            if (squared_residual < maximum_threshold_sqr) { // consider point as inlier
                 // Get the position of the gamma value in the lookup table
                 int x=(int)round(scale_of_stored_incomplete_gammas * squared_residual
                         / maximum_sigma_2_times_2);
                 // If the sought gamma value is not stored in the lookup, return the closest element
-                if (x >= stored_incomplete_gamma_number || x < 0 /*overflow*/)
-                    x  = stored_incomplete_gamma_number;
+                if (x >= stored_incomplete_gamma_number_min1 || x < 0 /*overflow*/)
+                    x  = stored_incomplete_gamma_number_min1;
                 // Calculate the loss implied by the current point
-                total_loss += two_ad_dof_plus_one_per_maximum_sigma * (maximum_sigma_2_per_2 *
+                total_loss -= (1 - two_ad_dof_plus_one_per_maximum_sigma * (maximum_sigma_2_per_2 *
                     stored_lower_incomplete_gamma_values[x] + squared_residual * 0.25 *
-                    (stored_complete_gamma_values[x] - gamma_value_of_k));
-            } else total_loss += outlier_loss; // outlier
-            if (total_loss > previous_best_loss)
-                break; // break if total loss is alreay higher than the best one
+                    (stored_complete_gamma_values[x] - gamma_value_of_k)) / max_loss);
+            }
+            if (total_loss - (points_size - point_idx) > previous_best_loss)
+                break;
         }
         return Score(num_tentative_inliers, total_loss);
     }
@@ -251,16 +254,16 @@ public:
             const float squared_residual = errors[point_idx];
             if (squared_residual < tentative_inlier_threshold)
                 num_tentative_inliers++;
-            if (squared_residual < maximum_threshold) {
+            if (squared_residual < maximum_threshold_sqr) {
                 int x=(int)round(scale_of_stored_incomplete_gammas * squared_residual
                                  / maximum_sigma_2_times_2);
-                if (x >= stored_incomplete_gamma_number || x < 0 /*overflow*/)
-                    x  = stored_incomplete_gamma_number;
-                total_loss += two_ad_dof_plus_one_per_maximum_sigma * (maximum_sigma_2_per_2 *
+                if (x >= stored_incomplete_gamma_number_min1 || x < 0 /*overflow*/)
+                    x  = stored_incomplete_gamma_number_min1;
+                total_loss -= (1 - two_ad_dof_plus_one_per_maximum_sigma * (maximum_sigma_2_per_2 *
                         stored_lower_incomplete_gamma_values[x] + squared_residual * 0.25 *
-                        (stored_complete_gamma_values[x] - gamma_value_of_k));
-            } else total_loss += outlier_loss;
-            if (total_loss > previous_best_loss)
+                        (stored_complete_gamma_values[x] - gamma_value_of_k)) / max_loss);
+            }
+            if (total_loss - (points_size - point_idx) > previous_best_loss)
                 break;
         }
         return Score(num_tentative_inliers, total_loss);
@@ -279,8 +282,8 @@ public:
     int getPointsSize () const override { return points_size; }
     Ptr<Quality> clone () const override {
         return makePtr<MagsacQualityImpl>(maximum_sigma, points_size, error->clone(),
-                tentative_inlier_threshold, degrees_of_freedom, k, gamma_value_of_k,
-                lower_gamma_value_of_k, C);
+                tentative_inlier_threshold, degrees_of_freedom,
+                k, gamma_value_of_k, lower_gamma_value_of_k, C);
     }
 };
 Ptr<MagsacQuality> MagsacQuality::create(double maximum_thr, int points_size_, const Ptr<Error> &error_,
@@ -354,7 +357,7 @@ private:
     int highest_inlier_number, current_sprt_idx; // i
     // time t_M needed to instantiate a model hypothesis given a sample
     // Let m_S be the number of models that are verified per sample
-    const double inlier_threshold, t_M, m_S;
+    const double inlier_threshold, norm_thr, one_over_thr, t_M, m_S;
 
     double lowest_sum_errors, current_epsilon, current_delta, current_A,
             delta_to_epsilon, complement_delta_to_complement_epsilon;
@@ -371,7 +374,8 @@ public:
           double inlier_threshold_, double prob_pt_of_good_model, double prob_pt_of_bad_model,
           double time_sample, double avg_num_models, ScoreMethod score_type_) : rng(state), err(err_),
           points_size(points_size_), inlier_threshold (inlier_threshold_),
-          t_M (time_sample), m_S (avg_num_models), score_type (score_type_) {
+          norm_thr(inlier_threshold_*9/4), one_over_thr (1/norm_thr), t_M (time_sample),
+          m_S (avg_num_models), score_type (score_type_) {
 
         // Generate array of random points for randomized evaluation
         points_random_pool = std::vector<int> (points_size_);
@@ -439,8 +443,9 @@ public:
                     break;
             }
             if (score_type == ScoreMethod::SCORE_METHOD_MSAC) {
-                sum_errors += error < inlier_threshold ? error : inlier_threshold;
-                if (sum_errors > lowest_sum_errors)
+                if (error < norm_thr)
+                    sum_errors -= (1 - error * one_over_thr);
+                if (sum_errors - points_size + tested_point > lowest_sum_errors)
                     break;
             } else if (score_type == ScoreMethod::SCORE_METHOD_RANSAC) {
                 if (tested_inliers + points_size - tested_point < highest_inlier_number)
@@ -455,7 +460,8 @@ public:
             score.inlier_number = tested_inliers;
             if (score_type == ScoreMethod::SCORE_METHOD_MSAC) {
                 score.score = sum_errors;
-                lowest_sum_errors = sum_errors;
+                if (lowest_sum_errors > sum_errors)
+                    lowest_sum_errors = sum_errors;
             } else if (score_type == ScoreMethod::SCORE_METHOD_RANSAC)
                 score.score = -static_cast<double>(tested_inliers);
             else if (score_type == ScoreMethod::SCORE_METHOD_LMEDS)
index 5372dbc..65fa2d3 100644 (file)
@@ -119,12 +119,25 @@ public:
         // check if LO
         const bool LO = params->getLO() != LocalOptimMethod::LOCAL_OPTIM_NULL;
         const bool is_magsac = params->getLO() == LocalOptimMethod::LOCAL_OPTIM_SIGMA;
-        const int repeat_magsac = 10;
+        const int max_hyp_test_before_ver = params->getMaxNumHypothesisToTestBeforeRejection();
+        const int repeat_magsac = 10, max_iters_before_LO = params->getMaxItersBeforeLO();
         Score best_score;
         Mat best_model;
         int final_iters;
 
         if (! parallel) {
+            auto update_best = [&] (const Mat &new_model, const Score &new_score) {
+                best_score = new_score;
+                // remember best model
+                new_model.copyTo(best_model);
+                // update quality and verifier to save evaluation time of a model
+                _quality->setBestScore(best_score.score);
+                // update verifier
+                _model_verifier->update(best_score.inlier_number);
+                // update upper bound of iterations
+                return _termination_criteria->update(best_model, best_score.inlier_number);
+            };
+            bool was_LO_run = false;
             Mat non_degenerate_model, lo_model;
             Score current_score, lo_score, non_denegenerate_model_score;
 
@@ -139,65 +152,54 @@ public:
                 const int number_of_models = _estimator->estimateModels(sample, models);
 
                 for (int i = 0; i < number_of_models; i++) {
-                    if (is_magsac && iters % repeat_magsac == 0) {
-                        if (!_local_optimization->refineModel
-                                (models[i], best_score, models[i], current_score))
-                            continue;
-                    } else if (_model_verifier->isModelGood(models[i])) {
-                        if (!_model_verifier->getScore(current_score)) {
-                            if (_model_verifier->hasErrors())
-                                current_score = _quality->getScore(_model_verifier->getErrors());
-                            else current_score = _quality->getScore(models[i]);
-                        }
-                    } else continue;
+                    if (iters < max_hyp_test_before_ver) {
+                        current_score = _quality->getScore(models[i]);
+                    } else {
+                        if (is_magsac && iters % repeat_magsac == 0) {
+                            if (!_local_optimization->refineModel
+                                    (models[i], best_score, models[i], current_score))
+                                continue;
+                        } else if (_model_verifier->isModelGood(models[i])) {
+                            if (!_model_verifier->getScore(current_score)) {
+                                if (_model_verifier->hasErrors())
+                                    current_score = _quality->getScore(_model_verifier->getErrors());
+                                else current_score = _quality->getScore(models[i]);
+                            }
+                        } else continue;
+                    }
 
                     if (current_score.isBetter(best_score)) {
                         if (_degeneracy->recoverIfDegenerate(sample, models[i],
                                 non_degenerate_model, non_denegenerate_model_score)) {
                             // check if best non degenerate model is better than so far the best model
-                            if (non_denegenerate_model_score.isBetter(best_score)) {
-                                best_score = non_denegenerate_model_score;
-                                non_degenerate_model.copyTo(best_model);
-                            } else
-                                // non degenerate models are worse then so far the best model.
-                                continue;
-                        } else {
-                            // copy current score to best score
-                            best_score = current_score;
-                            // remember best model
-                            models[i].copyTo(best_model);
-                        }
-
-                        // update quality to save evaluation time of a model
-                        // with no chance of being better than so-far-the-best
-                        _quality->setBestScore(best_score.score);
-
-                        // update upper bound of iterations
-                        max_iters = _termination_criteria->update
-                                (best_model, best_score.inlier_number);
-                        if (iters > max_iters)
-                            break;
+                            if (non_denegenerate_model_score.isBetter(best_score))
+                                max_iters = update_best(non_degenerate_model, non_denegenerate_model_score);
+                            else continue;
+                        } else max_iters = update_best(models[i], current_score);
 
-                        if (LO) {//} && iters >= max_iters_before_LO) {
+                        if (LO && iters >= max_iters_before_LO) {
                             // do magsac if it wasn't already run
-                            if (is_magsac && iters % repeat_magsac == 0) continue; // magsac has already run
+                            if (is_magsac && iters % repeat_magsac == 0 && iters >= max_hyp_test_before_ver) continue; // magsac has already run
+                            was_LO_run = true;
                             // update model by Local optimization
                             if (_local_optimization->refineModel
-                                    (best_model, best_score, lo_model, lo_score))
-                                if (lo_score.isBetter(best_score)) {
-                                    best_score = lo_score;
-                                    lo_model.copyTo(best_model);
-                                    // update quality and verifier and termination again
-                                    _quality->setBestScore(best_score.score);
-                                    _model_verifier->update(best_score.inlier_number);
-                                    max_iters = _termination_criteria->update
-                                            (best_model, best_score.inlier_number);
-                                    if (iters > max_iters)
-                                        break;
+                                    (best_model, best_score, lo_model, lo_score)) {
+                                if (lo_score.isBetter(best_score)){
+                                    max_iters = update_best(lo_model, lo_score);
                                 }
+                            }
                         }
+                        if (iters > max_iters)
+                            break;
                     } // end of if so far the best score
                 } // end loop of number of models
+                if (LO && !was_LO_run && iters >= max_iters_before_LO) {
+                    was_LO_run = true;
+                    if (_local_optimization->refineModel(best_model, best_score, lo_model, lo_score))
+                        if (lo_score.isBetter(best_score)){
+                            max_iters = update_best(lo_model, lo_score);
+                        }
+                }
             } // end main while loop
 
             final_iters = iters;
@@ -223,7 +225,9 @@ public:
                 Ptr<Degeneracy> degeneracy = _degeneracy->clone(thread_state++);
                 Ptr<Quality> quality = _quality->clone();
                 Ptr<ModelVerifier> model_verifier = _model_verifier->clone(thread_state++); // update verifier
-                Ptr<LocalOptimization> local_optimization = _local_optimization->clone(thread_state++);
+                Ptr<LocalOptimization> local_optimization;
+                if (LO)
+                    local_optimization = _local_optimization->clone(thread_state++);
                 Ptr<TerminationCriteria> termination_criteria = _termination_criteria->clone();
                 Ptr<Sampler> sampler;
                 if (!is_prosac)
@@ -243,8 +247,12 @@ public:
                     new_model.copyTo(best_model_thread);
                     best_model_thread.copyTo(best_models[thread_rng_id]);
                     best_score_all_threads = best_score_thread;
+                    // update upper bound of iterations
+                    return termination_criteria->update
+                            (best_model_thread, best_score_thread.inlier_number);
                 };
 
+                bool was_LO_run = false;
                 for (iters = 0; iters < max_iters && !success; iters++) {
                     success = num_hypothesis_tested++ > max_iters;
 
@@ -274,56 +282,55 @@ public:
 
                     const int number_of_models = estimator->estimateModels(sample, models);
                     for (int i = 0; i < number_of_models; i++) {
-                        if (is_magsac && iters % repeat_magsac == 0) {
-                            if (!local_optimization->refineModel
-                                    (models[i], best_score_thread, models[i], current_score))
-                                continue;
-                        } else if (model_verifier->isModelGood(models[i])) {
-                            if (!model_verifier->getScore(current_score)) {
-                                if (model_verifier->hasErrors())
-                                    current_score = quality->getScore(model_verifier->getErrors());
-                                else current_score = quality->getScore(models[i]);
-                            }
-                        } else continue;
+                        if (iters < max_hyp_test_before_ver) {
+                            current_score = quality->getScore(models[i]);
+                        } else {
+                            if (is_magsac && iters % repeat_magsac == 0) {
+                                if (!local_optimization->refineModel
+                                        (models[i], best_score_thread, models[i], current_score))
+                                    continue;
+                            } else if (model_verifier->isModelGood(models[i])) {
+                                if (!model_verifier->getScore(current_score)) {
+                                    if (model_verifier->hasErrors())
+                                        current_score = quality->getScore(model_verifier->getErrors());
+                                    else current_score = quality->getScore(models[i]);
+                                }
+                            } else continue;
+                        }
 
                         if (current_score.isBetter(best_score_all_threads)) {
                             if (degeneracy->recoverIfDegenerate(sample, models[i],
                                         non_degenerate_model, non_denegenerate_model_score)) {
                                 // check if best non degenerate model is better than so far the best model
                                 if (non_denegenerate_model_score.isBetter(best_score_thread))
-                                    update_best(non_denegenerate_model_score, non_degenerate_model);
-                                else
-                                    // non degenerate models are worse then so far the best model.
-                                    continue;
+                                    max_iters = update_best(non_denegenerate_model_score, non_degenerate_model);
+                                else continue;
                             } else
-                                update_best(current_score, models[i]);
+                                max_iters = update_best(current_score, models[i]);
 
-                            // update upper bound of iterations
-                            max_iters = termination_criteria->update
-                                    (best_model_thread, best_score_thread.inlier_number);
-                            if (num_hypothesis_tested > max_iters) {
-                                success = true; break;
-                            }
-
-                            if (LO) {
+                            if (LO && iters >= max_iters_before_LO) {
                                 // do magsac if it wasn't already run
-                                if (is_magsac && iters % repeat_magsac == 0) continue;
+                                if (is_magsac && iters % repeat_magsac == 0 && iters >= max_hyp_test_before_ver) continue;
+                                was_LO_run = true;
                                 // update model by Local optimizaion
                                 if (local_optimization->refineModel
                                        (best_model_thread, best_score_thread, lo_model, lo_score))
                                     if (lo_score.isBetter(best_score_thread)) {
-                                        update_best(lo_score, lo_model);
-                                        // update termination again
-                                        max_iters = termination_criteria->update
-                                                (best_model_thread, best_score_thread.inlier_number);
-                                        if (num_hypothesis_tested > max_iters) {
-                                            success = true;
-                                            break;
-                                        }
+                                        max_iters = update_best(lo_score, lo_model);
                                     }
                             }
+                            if (num_hypothesis_tested > max_iters) {
+                                success = true; break;
+                            }
                         } // end of if so far the best score
                     } // end loop of number of models
+                    if (LO && !was_LO_run && iters >= max_iters_before_LO) {
+                        was_LO_run = true;
+                        if (local_optimization->refineModel(best_model_thread, best_score_thread, lo_model, lo_score))
+                            if (lo_score.isBetter(best_score_thread)){
+                                max_iters = update_best(lo_score, lo_model);
+                            }
+                    }
                 } // end of loop over iters
             }}); // end parallel
             ///////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -354,7 +361,6 @@ public:
                     polished_model.copyTo(best_model);
                 }
         }
-
         // ================= here is ending ransac main implementation ===========================
         std::vector<bool> inliers_mask;
         if (params->isMaskRequired()) {
@@ -402,7 +408,7 @@ int mergePoints (InputArray pts1_, InputArray pts2_, Mat &pts, bool ispnp) {
 void saveMask (OutputArray mask, const std::vector<bool> &inliers_mask) {
     if (mask.needed()) {
         const int points_size = (int) inliers_mask.size();
-        mask.create(1, points_size, CV_8U);
+        mask.create(points_size, 1, CV_8U);
         auto * maskptr = mask.getMat().ptr<uchar>();
         for (int i = 0; i < points_size; i++)
             maskptr[i] = (uchar) inliers_mask[i];
@@ -433,7 +439,8 @@ void setParameters (int flag, Ptr<Model> &params, EstimationMethod estimator, do
             params = Model::create(thr, estimator, SamplingMethod::SAMPLING_UNIFORM, conf, max_iters,
                                    ScoreMethod::SCORE_METHOD_MAGSAC);
             params->setLocalOptimization(LocalOptimMethod ::LOCAL_OPTIM_SIGMA);
-            params->setLOSampleSize(100);
+            params->setLOSampleSize(params->isHomography() ? 75 : 50);
+            params->setLOIterations(params->isHomography() ? 15 : 10);
             break;
         case USAC_PARALLEL:
             params = Model::create(thr, estimator, SamplingMethod::SAMPLING_UNIFORM, conf, max_iters,
@@ -445,13 +452,15 @@ void setParameters (int flag, Ptr<Model> &params, EstimationMethod estimator, do
             params = Model::create(thr, estimator, SamplingMethod::SAMPLING_UNIFORM, conf, max_iters,
                                    ScoreMethod::SCORE_METHOD_MSAC);
             params->setLocalOptimization(LocalOptimMethod ::LOCAL_OPTIM_GC);
+            params->setLOSampleSize(20);
+            params->setLOIterations(25);
             break;
         case USAC_FAST:
             params = Model::create(thr, estimator, SamplingMethod::SAMPLING_UNIFORM, conf, max_iters,
-                                   ScoreMethod::SCORE_METHOD_RANSAC);
+                                   ScoreMethod::SCORE_METHOD_MSAC);
             params->setLocalOptimization(LocalOptimMethod ::LOCAL_OPTIM_INNER_AND_ITER_LO);
-            params->setLOIterations(7);
-            params->setLOIterativeIters(4);
+            params->setLOIterations(5);
+            params->setLOIterativeIters(3);
             break;
         case USAC_PROSAC:
             params = Model::create(thr, estimator, SamplingMethod::SAMPLING_PROSAC, conf, max_iters,
@@ -465,6 +474,13 @@ void setParameters (int flag, Ptr<Model> &params, EstimationMethod estimator, do
             break;
         default: CV_Error(cv::Error::StsBadFlag, "Incorrect flag for USAC!");
     }
+    // do not do too many iterations for PnP
+    if (estimator == EstimationMethod::P3P) {
+        if (params->getLOInnerMaxIters() > 15)
+            params->setLOIterations(15);
+        params->setLOIterativeIters(0);
+    }
+
     params->maskRequired(mask_needed);
 }
 
@@ -477,7 +493,12 @@ Mat findHomography (InputArray srcPoints, InputArray dstPoints, int method, doub
             ransac_output, noArray(), noArray(), noArray(), noArray())) {
         saveMask(mask, ransac_output->getInliersMask());
         return ransac_output->getModel() / ransac_output->getModel().at<double>(2,2);
-    } else return Mat();
+    }
+    if (mask.needed()){
+        mask.create(std::max(srcPoints.getMat().rows, srcPoints.getMat().cols), 1, CV_8U);
+        mask.setTo(Scalar::all(0));
+    }
+    return Mat();
 }
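For callers, the visible effect of this hunk is that a failed estimation now returns an all-zero inlier mask of the expected size instead of leaving the mask untouched. A minimal usage sketch, assuming an OpenCV build that contains this patch; the correspondences are made up:

    #include <opencv2/calib3d.hpp>
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<cv::Point2f> src = {{0,0},{1,0},{1,1},{0,1},{2,3},{4,1}};
        std::vector<cv::Point2f> dst = {{0,0},{2,0},{2,2},{0,2},{4,6},{8,2}};
        cv::Mat mask;
        const cv::Mat H = cv::findHomography(src, dst, cv::USAC_MAGSAC, 3.0, mask);
        if (H.empty())
            std::printf("estimation failed, inliers: %d\n", cv::countNonZero(mask)); // mask is allocated and zeroed
        else
            std::printf("homography found, inliers: %d\n", cv::countNonZero(mask));
        return 0;
    }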
 
 Mat findFundamentalMat( InputArray points1, InputArray points2, int method, double thr,
@@ -489,7 +510,12 @@ Mat findFundamentalMat( InputArray points1, InputArray points2, int method, doub
             ransac_output, noArray(), noArray(), noArray(), noArray())) {
         saveMask(mask, ransac_output->getInliersMask());
         return ransac_output->getModel();
-    } else return Mat();
+    }
+    if (mask.needed()){
+        mask.create(std::max(points1.getMat().rows, points1.getMat().cols), 1, CV_8U);
+        mask.setTo(Scalar::all(0));
+    }
+    return Mat();
 }
 
 Mat findEssentialMat (InputArray points1, InputArray points2, InputArray cameraMatrix1,
@@ -501,7 +527,12 @@ Mat findEssentialMat (InputArray points1, InputArray points2, InputArray cameraM
             ransac_output, cameraMatrix1, cameraMatrix1, noArray(), noArray())) {
         saveMask(mask, ransac_output->getInliersMask());
         return ransac_output->getModel();
-    } else return Mat();
+    }
+    if (mask.needed()){
+        mask.create(std::max(points1.getMat().rows, points1.getMat().cols), 1, CV_8U);
+        mask.setTo(Scalar::all(0));
+    }
+    return Mat();
 }
 
 bool solvePnPRansac( InputArray objectPoints, InputArray imagePoints,
@@ -519,7 +550,12 @@ bool solvePnPRansac( InputArray objectPoints, InputArray imagePoints,
         model.col(0).copyTo(rvec);
         model.col(1).copyTo(tvec);
         return true;
-    } else return false;
+    }
+    if (mask.needed()){
+        mask.create(std::max(objectPoints.getMat().rows, objectPoints.getMat().cols), 1, CV_8U);
+        mask.setTo(Scalar::all(0));
+    }
+    return false;
 }
 
 Mat estimateAffine2D(InputArray from, InputArray to, OutputArray mask, int method,
@@ -531,7 +567,12 @@ Mat estimateAffine2D(InputArray from, InputArray to, OutputArray mask, int metho
             ransac_output, noArray(), noArray(), noArray(), noArray())) {
         saveMask(mask, ransac_output->getInliersMask());
         return ransac_output->getModel().rowRange(0,2);
-    } else return Mat();
+    }
+    if (mask.needed()){
+        mask.create(std::max(from.getMat().rows, from.getMat().cols), 1, CV_8U);
+        mask.setTo(Scalar::all(0));
+    }
+    return Mat();
 }
 
 class ModelImpl : public Model {
@@ -546,14 +587,14 @@ private:
 
     // for neighborhood graph
     int k_nearest_neighbors = 8;//, flann_search_params = 5, num_kd_trees = 1; // for FLANN
-    int cell_size = 25; // pixels, for grid neighbors searching
-    int radius = 20; // pixels, for radius-search neighborhood graph
+    int cell_size = 50; // pixels, for grid neighbors searching
+    int radius = 30; // pixels, for radius-search neighborhood graph
     NeighborSearchMethod neighborsType = NeighborSearchMethod::NEIGH_GRID;
 
     // Local Optimization parameters
     LocalOptimMethod lo = LocalOptimMethod ::LOCAL_OPTIM_INNER_AND_ITER_LO;
-    int lo_sample_size=14, lo_inner_iterations=15, lo_iterative_iterations=5,
-            lo_thr_multiplier=3, lo_iter_sample_size = 30;
+    int lo_sample_size=16, lo_inner_iterations=15, lo_iterative_iterations=8,
+            lo_thr_multiplier=15, lo_iter_sample_size = 30;
 
     // Graph cut parameters
     const double spatial_coherence_term = 0.975;
@@ -563,11 +604,11 @@ private:
 
     // preemptive verification test
     VerificationMethod verifier = VerificationMethod ::SprtVerifier;
-    const int max_hypothesis_test_before_verification = 10;
+    const int max_hypothesis_test_before_verification = 15;
 
     // sprt parameters
-    // lower bound estimate is 1.1% of inliers
-    double sprt_eps = 0.011, sprt_delta = 0.01, avg_num_models, time_for_model_est;
+    // lower bound estimate is 1% of inliers
+    double sprt_eps = 0.01, sprt_delta = 0.008, avg_num_models, time_for_model_est;
 
     // estimator error
     ErrorMetric est_error;
@@ -578,15 +619,16 @@ private:
     const std::vector<int> grid_cell_number = {16, 8, 4, 2};
 
     //for final least squares polisher
-    int final_lsq_iters = 2;
+    int final_lsq_iters = 3;
 
     bool need_mask = true, is_parallel = false;
     int random_generator_state = 0;
+    const int max_iters_before_LO = 100;
 
     // magsac parameters:
-    int DoF = 4;
-    double sigma_quantile = 3.64, upper_incomplete_of_sigma_quantile = 0.00365,
-        lower_incomplete_of_sigma_quantile = 1.30122, C = 0.25, maximum_thr = 10.;
+    int DoF = 2;
+    double sigma_quantile = 3.04, upper_incomplete_of_sigma_quantile = 0.00419,
+        lower_incomplete_of_sigma_quantile = 0.8629, C = 0.5, maximum_thr = 7.5;
 public:
     ModelImpl (double threshold_, EstimationMethod estimator_, SamplingMethod sampler_, double confidence_=0.95,
                int max_iterations_=5000, ScoreMethod score_ =ScoreMethod::SCORE_METHOD_MSAC) {
@@ -603,16 +645,16 @@ public:
                 avg_num_models = 1; time_for_model_est = 50;
                 sample_size = 3; est_error = ErrorMetric ::FORW_REPR_ERR; break;
             case (EstimationMethod::Homography):
-                avg_num_models = 1; time_for_model_est = 90;
+                avg_num_models = 1; time_for_model_est = 150;
                 sample_size = 4; est_error = ErrorMetric ::FORW_REPR_ERR; break;
             case (EstimationMethod::Fundamental):
-                avg_num_models = 2.38; time_for_model_est = 150; maximum_thr = 3;
+                avg_num_models = 2.38; time_for_model_est = 180; maximum_thr = 2.5;
                 sample_size = 7; est_error = ErrorMetric ::SAMPSON_ERR; break;
             case (EstimationMethod::Fundamental8):
-                avg_num_models = 1; time_for_model_est = 100; maximum_thr = 3;
+                avg_num_models = 1; time_for_model_est = 100; maximum_thr = 2.5;
                 sample_size = 8; est_error = ErrorMetric ::SAMPSON_ERR; break;
             case (EstimationMethod::Essential):
-                avg_num_models = 3.93; time_for_model_est = 2000; maximum_thr = 3;
+                avg_num_models = 3.93; time_for_model_est = 1000; maximum_thr = 2.5;
                 sample_size = 5; est_error = ErrorMetric ::SGD_ERR; break;
             case (EstimationMethod::P3P):
                 avg_num_models = 1.38; time_for_model_est = 800;
@@ -620,18 +662,19 @@ public:
             case (EstimationMethod::P6P):
                 avg_num_models = 1; time_for_model_est = 300;
                 sample_size = 6; est_error = ErrorMetric ::RERPOJ; break;
-            default: CV_Assert(0 && "Estimator has not implemented yet!");
+            default: CV_Error(cv::Error::StsNotImplemented, "Estimator is not implemented yet!");
         }
 
         if (estimator_ == EstimationMethod::P3P || estimator_ == EstimationMethod::P6P) {
             neighborsType = NeighborSearchMethod::NEIGH_FLANN_KNN;
             k_nearest_neighbors = 2;
-            DoF = 5;
-            sigma_quantile = 3.88;
-            upper_incomplete_of_sigma_quantile = 0.00458;
-            lower_incomplete_of_sigma_quantile = 1.96032;
-            C = 0.13298;
         }
+        if (estimator == EstimationMethod::Fundamental || estimator == EstimationMethod::Essential) {
+            lo_sample_size = 21;
+            lo_thr_multiplier = 10;
+        }
+        if (estimator == EstimationMethod::Homography)
+            maximum_thr = 8.;
         threshold = threshold_;
     }
     void setVerifier (VerificationMethod verifier_) override { verifier = verifier_; }
@@ -645,6 +688,7 @@ public:
     void setLOIterations (int iters) override { lo_inner_iterations = iters; }
     void setLOIterativeIters (int iters) override {lo_iterative_iterations = iters; }
     void setLOSampleSize (int lo_sample_size_) override { lo_sample_size = lo_sample_size_; }
+    void setThresholdMultiplierLO (double thr_mult) override { lo_thr_multiplier = (int) round(thr_mult); }
     void maskRequired (bool need_mask_) override { need_mask = need_mask_; }
     void setRandomGeneratorState (int state) override { random_generator_state = state; }
     bool isMaskRequired () const override { return need_mask; }
@@ -682,6 +726,7 @@ public:
     VerificationMethod getVerifier () const override { return verifier; }
     SamplingMethod getSampler () const override { return sampler; }
     int getRandomGeneratorState () const override { return random_generator_state; }
+    int getMaxItersBeforeLO () const override { return max_iters_before_LO; }
     double getSPRTdelta () const override { return sprt_delta; }
     double getSPRTepsilon () const override { return sprt_eps; }
     double getSPRTavgNumModels () const override { return avg_num_models; }
@@ -734,7 +779,9 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
             K1 = K1_.getMat(); K1.convertTo(K1, CV_64F);
             if (! dist_coeff1.empty()) {
                 // undistortPoints also calibrates points using K
-                undistortPoints(points1, undist_points1, K1_, dist_coeff1);
+                if (points1.isContinuous())
+                     undistortPoints(points1, undist_points1, K1_, dist_coeff1);
+                else undistortPoints(points1.getMat().clone(), undist_points1, K1_, dist_coeff1);
                 points_size = mergePoints(undist_points1, points2, points, true);
                 Utils::normalizeAndDecalibPointsPnP (K1, points, calib_points);
             } else {
@@ -750,8 +797,12 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
             K2 = K2_.getMat(); K2.convertTo(K2, CV_64F);
             if (! dist_coeff1.empty() || ! dist_coeff2.empty()) {
                 // undistortPoints also calibrates points using K
-                cv::undistortPoints(points1, undist_points1, K1_, dist_coeff1);
-                cv::undistortPoints(points2, undist_points2, K2_, dist_coeff2);
+                if (points1.isContinuous())
+                     undistortPoints(points1, undist_points1, K1_, dist_coeff1);
+                else undistortPoints(points1.getMat().clone(), undist_points1, K1_, dist_coeff1);
+                if (points2.isContinuous())
+                     undistortPoints(points2, undist_points2, K2_, dist_coeff2);
+                else undistortPoints(points2.getMat().clone(), undist_points2, K2_, dist_coeff2);
                 points_size = mergePoints(undist_points1, undist_points2, calib_points, false);
             } else {
                 points_size = mergePoints(points1, points2, points, false);
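
The isContinuous() guards above matter because the input arrays may be non-contiguous views of a larger matrix; a small self-contained illustration of the distinction (standard cv::Mat semantics, code not taken from the patch):

#include <opencv2/core.hpp>

// A column slice of a larger Mat keeps the parent's row stride, so it is not
// continuous; clone() produces a compact copy that is safe to pass downstream,
// which is what the patch does before calling undistortPoints.
static void continuityDemo ()
{
    cv::Mat big(100, 4, CV_32F);
    cv::Mat pts = big.colRange(0, 2);      // 100x2 view of the first two columns
    CV_Assert(!pts.isContinuous());
    CV_Assert(pts.clone().isContinuous());
}
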
@@ -771,7 +822,7 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
         if (params->getNeighborsSearch() == NeighborSearchMethod::NEIGH_GRID) {
             graph = GridNeighborhoodGraph::create(points, points_size,
                 params->getCellSize(), params->getCellSize(),
-                params->getCellSize(), params->getCellSize());
+                params->getCellSize(), params->getCellSize(), 10);
         } else if (params->getNeighborsSearch() == NeighborSearchMethod::NEIGH_FLANN_KNN) {
             graph = FlannNeighborhoodGraph::create(points, points_size,params->getKNN(), false, 5, 1);
         } else if (params->getNeighborsSearch() == NeighborSearchMethod::NEIGH_FLANN_RADIUS) {
@@ -802,7 +853,7 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
                         "Cell number in layers must be in decreasing order!");
             layers.emplace_back(GridNeighborhoodGraph::create(points, points_size,
           (int)(img1_width / (float)cell_number), (int)(img1_height / (float)cell_number),
-          (int)(img2_width / (float)cell_number), (int)(img2_height / (float)cell_number)));
+          (int)(img2_width / (float)cell_number), (int)(img2_height / (float)cell_number), 10));
         }
     }
 
@@ -811,8 +862,10 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
         points = calib_points;
         // if the maximum calibrated threshold significantly differs from the threshold then set an upper bound
         if (max_thr > 10*threshold)
-            max_thr = 10*threshold;
+            max_thr = sqrt(10*threshold); // max_thr is squared again later
     }
+    if (max_thr < threshold)
+        max_thr = threshold;
 
     switch (params->getError()) {
         case ErrorMetric::SYMM_REPR_ERR:
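
Read together, the two checks above clamp the maximum (MAGSAC-style) threshold into a band around the inlier threshold; a standalone restatement for clarity (names are illustrative, not from the library):

#include <cmath>

// Cap the calibrated maximum threshold relative to the inlier threshold,
// remembering that it is squared again downstream, and never let it drop
// below the threshold itself.
static double clampMaxThreshold (double max_thr, double threshold, bool calibrated)
{
    if (calibrated && max_thr > 10 * threshold)
        max_thr = std::sqrt(10 * threshold);   // squared again later
    if (max_thr < threshold)
        max_thr = threshold;
    return max_thr;
}
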
@@ -936,7 +989,8 @@ bool run (const Ptr<const Model> &params, InputArray points1, InputArray points2
                 lo = GraphCut::create(estimator, error, quality, graph, lo_sampler, threshold,
                    params->getGraphCutSpatialCoherenceTerm(), params->getLOInnerMaxIters()); break;
             case LocalOptimMethod::LOCAL_OPTIM_SIGMA:
-                lo = SigmaConsensus::create(estimator, error, quality, verifier, params->getLOSampleSize(), 1,
+                lo = SigmaConsensus::create(estimator, error, quality, verifier,
+                     params->getLOSampleSize(), params->getLOInnerMaxIters(),
                      params->getDegreesOfFreedom(), params->getSigmaQuantile(),
                      params->getUpperIncompleteOfSigmaQuantile(), params->getC(), max_thr); break;
             default: CV_Error(cv::Error::StsNotImplemented , "Local Optimization is not implemented!");
index d045e1f..c443728 100644 (file)
@@ -256,8 +256,6 @@ public:
     }
 
     void generateSample (std::vector<int> &sample) override {
-        // std::cout << "PROSAC sampler, termination length " << termination_length << "\n";
-
         if (kth_sample_number > growth_max_samples) {
             // if PROSAC has not converged to a solution then do uniform sampling.
             random_gen->generateUniqueRandomSet(sample, sample_size, points_size);
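
Once the PROSAC growth schedule is exhausted the sampler simply degenerates to uniform sampling of unique indices; a minimal standard-library sketch of such a fallback draw (illustrative helper, the internal generateUniqueRandomSet may be implemented differently):

#include <algorithm>
#include <numeric>
#include <random>
#include <vector>

// Draw sample_size distinct indices uniformly from [0, points_size).
static std::vector<int> uniformUniqueSample (int sample_size, int points_size, std::mt19937 &rng)
{
    std::vector<int> idx(points_size);
    std::iota(idx.begin(), idx.end(), 0);
    std::shuffle(idx.begin(), idx.end(), rng); // O(points_size); fine for a sketch
    idx.resize(sample_size);
    return idx;
}
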
index 9a371a4..1c781a7 100644 (file)
@@ -168,7 +168,7 @@ Vec3d Math::rotMat2RotVec (const Matx33d &R) {
 /*
  * Eliminate a matrix of m rows and n columns to upper-triangular form.
  */
-void Math::eliminateUpperTriangular (std::vector<double> &a, int m, int n) {
+bool Math::eliminateUpperTriangular (std::vector<double> &a, int m, int n) {
     for (int r = 0; r < m; r++){
         double pivot = a[r*n+r];
         int row_with_pivot = r;
@@ -182,7 +182,7 @@ void Math::eliminateUpperTriangular (std::vector<double> &a, int m, int n) {
 
         // if pivot value is 0, the matrix is not full rank
         if (fabs(pivot) < DBL_EPSILON)
-            continue;
+            return false; // matrix is not full rank -> terminate
 
         // swap row with maximum pivot value with current row
         for (int c = r; c < n; c++)
@@ -190,11 +190,14 @@ void Math::eliminateUpperTriangular (std::vector<double> &a, int m, int n) {
 
         // eliminate other rows
         for (int j = r+1; j < m; j++){
-            const auto fac = a[j*n+r] / pivot;
-            for (int c = r; c < n; c++)
-                a[j*n+c] -= fac * a[r*n+c];
+            const int row_idx1 = j*n, row_idx2 = r*n;
+            const auto fac = a[row_idx1+r] / pivot;
+            a[row_idx1+r] = 0; // zero eliminated element
+            for (int c = r+1; c < n; c++)
+                a[row_idx1+c] -= fac * a[row_idx2+c];
         }
     }
+    return true;
 }
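
With the boolean return value a caller can now reject a rank-deficient system up front instead of continuing with a partially eliminated matrix. A self-contained restatement of the patched elimination plus such a caller-side check (illustrative, not the library file):

#include <cfloat>
#include <cmath>
#include <cstdio>
#include <utility>
#include <vector>

// Gaussian elimination with partial pivoting on a row-major m x n matrix;
// returns false as soon as a zero pivot shows the matrix is not full rank.
static bool eliminateUpperTriangularSketch (std::vector<double> &a, int m, int n)
{
    for (int r = 0; r < m; r++) {
        double pivot = a[r*n+r];
        int row_with_pivot = r;
        for (int j = r+1; j < m; j++)
            if (std::fabs(pivot) < std::fabs(a[j*n+r])) {
                pivot = a[j*n+r];
                row_with_pivot = j;
            }
        if (std::fabs(pivot) < DBL_EPSILON)
            return false;                           // rank-deficient -> reject
        for (int c = r; c < n; c++)
            std::swap(a[r*n+c], a[row_with_pivot*n+c]);
        for (int j = r+1; j < m; j++) {
            const double fac = a[j*n+r] / pivot;
            a[j*n+r] = 0;                           // zero the eliminated element
            for (int c = r+1; c < n; c++)
                a[j*n+c] -= fac * a[r*n+c];
        }
    }
    return true;
}

int main ()
{
    std::vector<double> A = { 1, 2, 3,
                              2, 4, 6,              // multiple of row 0 -> not full rank
                              0, 1, 5 };
    std::printf("full rank: %s\n", eliminateUpperTriangularSketch(A, 3, 3) ? "yes" : "no");
    return 0;
}
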
 
 //////////////////////////////////////// RANDOM GENERATOR /////////////////////////////
@@ -467,7 +470,8 @@ private:
     std::vector<std::vector<int>> graph;
 public:
     GridNeighborhoodGraphImpl (const Mat &container_, int points_size,
-          int cell_size_x_img1, int cell_size_y_img1, int cell_size_x_img2, int cell_size_y_img2) {
+          int cell_size_x_img1, int cell_size_y_img1, int cell_size_x_img2, int cell_size_y_img2,
+          int max_neighbors) {
 
         const auto * const container = (float *) container_.data;
         // <int, int, int, int> -> {neighbors set}
@@ -501,11 +505,14 @@ public:
             for (int v_in_cell : neighbors) {
                 // there is always at least one neighbor
                 auto &graph_row = graph[v_in_cell];
-                graph_row = std::vector<int>(neighbors_in_cell-1);
+                graph_row = std::vector<int>(std::min(max_neighbors, neighbors_in_cell-1));
                 int j = 0;
                 for (int n : neighbors)
-                    if (n != v_in_cell)
+                    if (n != v_in_cell){
                         graph_row[j++] = n;
+                        if (j >= max_neighbors)
+                            break;
+                    }
             }
         }
     }
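
The new max_neighbors parameter bounds each point's adjacency list to a fixed number of cell-mates, so one densely populated cell no longer produces a quadratically sized graph. A small standalone illustration of the capping (illustrative helper, not the library code):

#include <vector>

// Keep at most max_neighbors other points of the same grid cell as neighbors of v.
static std::vector<int> cappedCellNeighbors (const std::vector<int> &points_in_cell,
                                             int v, int max_neighbors)
{
    std::vector<int> row;
    row.reserve(max_neighbors);
    for (int n : points_in_cell) {
        if (n == v)
            continue;
        row.push_back(n);
        if ((int) row.size() >= max_neighbors)
            break;
    }
    return row;
}
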
@@ -519,8 +526,8 @@ public:
 
 Ptr<GridNeighborhoodGraph> GridNeighborhoodGraph::create(const Mat &points,
      int points_size, int cell_size_x_img1_, int cell_size_y_img1_,
-     int cell_size_x_img2_, int cell_size_y_img2_) {
+     int cell_size_x_img2_, int cell_size_y_img2_, int max_neighbors) {
     return makePtr<GridNeighborhoodGraphImpl>(points, points_size,
-      cell_size_x_img1_, cell_size_y_img1_, cell_size_x_img2_, cell_size_y_img2_);
+      cell_size_x_img1_, cell_size_y_img1_, cell_size_x_img2_, cell_size_y_img2_, max_neighbors);
 }
 }}
\ No newline at end of file