made everything compile and even run somehow
author    Vadim Pisarevsky <vadim.pisarevsky@gmail.com>
          Sat, 2 Aug 2014 21:41:09 +0000 (01:41 +0400)
committer Vadim Pisarevsky <vadim.pisarevsky@gmail.com>
          Sat, 2 Aug 2014 21:41:09 +0000 (01:41 +0400)
31 files changed:
apps/traincascade/CMakeLists.txt
apps/traincascade/boost.h
apps/traincascade/cascadeclassifier.h
apps/traincascade/old_ml.hpp [new file with mode: 0644]
apps/traincascade/old_ml_boost.cpp [new file with mode: 0644]
apps/traincascade/old_ml_data.cpp [new file with mode: 0644]
apps/traincascade/old_ml_inner_functions.cpp [new file with mode: 0644]
apps/traincascade/old_ml_precomp.hpp [new file with mode: 0644]
apps/traincascade/old_ml_tree.cpp [new file with mode: 0644]
apps/traincascade/traincascade.cpp
apps/traincascade/traincascade_features.h
modules/ml/include/opencv2/ml.hpp
modules/ml/src/ann_mlp.cpp
modules/ml/src/boost.cpp
modules/ml/src/data.cpp
modules/ml/src/inner_functions.cpp
modules/ml/src/knearest.cpp
modules/ml/src/nbayes.cpp
modules/ml/src/svm.cpp
modules/ml/src/tree.cpp
modules/ml/test/test_emknearestkmeans.cpp
modules/ml/test/test_mltests2.cpp
samples/cpp/agaricus-lepiota.data [deleted file]
samples/cpp/bagofwords_classification.cpp
samples/cpp/letter_recog.cpp
samples/cpp/mushroom.cpp [deleted file]
samples/cpp/points_classifier.cpp
samples/cpp/train_HOG.cpp
samples/cpp/tree_engine.cpp
samples/cpp/tutorial_code/ml/introduction_to_svm/introduction_to_svm.cpp
samples/cpp/tutorial_code/ml/non_linear_svms/non_linear_svms.cpp

diff --git a/apps/traincascade/CMakeLists.txt b/apps/traincascade/CMakeLists.txt
index cca5636..ab32b4c 100644 (file)
@@ -1,4 +1,4 @@
-set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_ml opencv_imgproc opencv_photo opencv_objdetect opencv_imgcodecs opencv_videoio opencv_highgui opencv_calib3d opencv_video opencv_features2d)
+set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_imgproc opencv_objdetect opencv_imgcodecs opencv_highgui opencv_calib3d opencv_features2d)
 ocv_check_dependencies(${OPENCV_TRAINCASCADE_DEPS})
 
 if(NOT OCV_DEPENDENCIES_FOUND)
@@ -10,13 +10,10 @@ project(traincascade)
 ocv_include_directories("${CMAKE_CURRENT_SOURCE_DIR}" "${OpenCV_SOURCE_DIR}/include/opencv")
 ocv_include_modules(${OPENCV_TRAINCASCADE_DEPS})
 
-set(traincascade_files traincascade.cpp
-  cascadeclassifier.cpp cascadeclassifier.h
-  boost.cpp boost.h features.cpp traincascade_features.h
-  haarfeatures.cpp haarfeatures.h
-  lbpfeatures.cpp lbpfeatures.h
-  HOGfeatures.cpp HOGfeatures.h
-  imagestorage.cpp imagestorage.h)
+file(GLOB SRCS *.cpp)
+file(GLOB HDRS *.h*)
+
+set(traincascade_files ${SRCS} ${HDRS})
 
 set(the_target opencv_traincascade)
 add_executable(${the_target} ${traincascade_files})
diff --git a/apps/traincascade/boost.h b/apps/traincascade/boost.h
index 0edf776..48d4789 100644 (file)
@@ -2,7 +2,7 @@
 #define _OPENCV_BOOST_H_
 
 #include "traincascade_features.h"
-#include "ml.h"
+#include "old_ml.hpp"
 
 struct CvCascadeBoostParams : CvBoostParams
 {
diff --git a/apps/traincascade/cascadeclassifier.h b/apps/traincascade/cascadeclassifier.h
index 93be478..6d6cb5b 100644 (file)
@@ -7,8 +7,6 @@
 #include "lbpfeatures.h"
 #include "HOGfeatures.h" //new
 #include "boost.h"
-#include "cv.h"
-#include "cxcore.h"
 
 #define CC_CASCADE_FILENAME "cascade.xml"
 #define CC_PARAMS_FILENAME "params.xml"
diff --git a/apps/traincascade/old_ml.hpp b/apps/traincascade/old_ml.hpp
new file mode 100644 (file)
index 0000000..6ec31a0
--- /dev/null
+++ b/apps/traincascade/old_ml.hpp
@@ -0,0 +1,2165 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#ifndef __OPENCV_ML_HPP__
+#define __OPENCV_ML_HPP__
+
+#ifdef __cplusplus
+#  include "opencv2/core.hpp"
+#endif
+
+#include "opencv2/core/core_c.h"
+#include <limits.h>
+
+#ifdef __cplusplus
+
+#include <map>
+#include <iostream>
+
+// Apple defines a check() macro somewhere in the debug headers
+// that interferes with a method definition in this header
+#undef check
+
+/****************************************************************************************\
+*                               Main struct definitions                                  *
+\****************************************************************************************/
+
+/* log(2*PI) */
+#define CV_LOG2PI (1.8378770664093454835606594728112)
+
+/* columns of <trainData> matrix are training samples */
+#define CV_COL_SAMPLE 0
+
+/* rows of <trainData> matrix are training samples */
+#define CV_ROW_SAMPLE 1
+
+#define CV_IS_ROW_SAMPLE(flags) ((flags) & CV_ROW_SAMPLE)
+
+struct CvVectors
+{
+    int type;
+    int dims, count;
+    CvVectors* next;
+    union
+    {
+        uchar** ptr;
+        float** fl;
+        double** db;
+    } data;
+};
+
+#if 0
+/* A structure, representing the lattice range of statmodel parameters.
+   It is used for optimizing statmodel parameters by the cross-validation method.
+   The lattice is logarithmic, so <step> must be greater than 1. */
+typedef struct CvParamLattice
+{
+    double min_val;
+    double max_val;
+    double step;
+}
+CvParamLattice;
+
+CV_INLINE CvParamLattice cvParamLattice( double min_val, double max_val,
+                                         double log_step )
+{
+    CvParamLattice pl;
+    pl.min_val = MIN( min_val, max_val );
+    pl.max_val = MAX( min_val, max_val );
+    pl.step = MAX( log_step, 1. );
+    return pl;
+}
+
+CV_INLINE CvParamLattice cvDefaultParamLattice( void )
+{
+    CvParamLattice pl = {0,0,0};
+    return pl;
+}
+#endif
+
+/* Variable type */
+#define CV_VAR_NUMERICAL    0
+#define CV_VAR_ORDERED      0
+#define CV_VAR_CATEGORICAL  1
+
+#define CV_TYPE_NAME_ML_SVM         "opencv-ml-svm"
+#define CV_TYPE_NAME_ML_KNN         "opencv-ml-knn"
+#define CV_TYPE_NAME_ML_NBAYES      "opencv-ml-bayesian"
+#define CV_TYPE_NAME_ML_EM          "opencv-ml-em"
+#define CV_TYPE_NAME_ML_BOOSTING    "opencv-ml-boost-tree"
+#define CV_TYPE_NAME_ML_TREE        "opencv-ml-tree"
+#define CV_TYPE_NAME_ML_ANN_MLP     "opencv-ml-ann-mlp"
+#define CV_TYPE_NAME_ML_CNN         "opencv-ml-cnn"
+#define CV_TYPE_NAME_ML_RTREES      "opencv-ml-random-trees"
+#define CV_TYPE_NAME_ML_ERTREES     "opencv-ml-extremely-randomized-trees"
+#define CV_TYPE_NAME_ML_GBT         "opencv-ml-gradient-boosting-trees"
+
+#define CV_TRAIN_ERROR  0
+#define CV_TEST_ERROR   1
+
+class CvStatModel
+{
+public:
+    CvStatModel();
+    virtual ~CvStatModel();
+
+    virtual void clear();
+
+    CV_WRAP virtual void save( const char* filename, const char* name=0 ) const;
+    CV_WRAP virtual void load( const char* filename, const char* name=0 );
+
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+    virtual void read( CvFileStorage* storage, CvFileNode* node );
+
+protected:
+    const char* default_model_name;
+};
+
+/****************************************************************************************\
+*                                 Normal Bayes Classifier                                *
+\****************************************************************************************/
+
+/* The structure, representing the grid range of statmodel parameters.
+   It is used for optimizing statmodel accuracy by varying model parameters,
+   the accuracy estimate being computed by cross-validation.
+   The grid is logarithmic, so <step> must be greater than 1. */
+
+class CvMLData;
+
+struct CvParamGrid
+{
+    // SVM params type
+    enum { SVM_C=0, SVM_GAMMA=1, SVM_P=2, SVM_NU=3, SVM_COEF=4, SVM_DEGREE=5 };
+
+    CvParamGrid()
+    {
+        min_val = max_val = step = 0;
+    }
+
+    CvParamGrid( double min_val, double max_val, double log_step );
+    //CvParamGrid( int param_id );
+    bool check() const;
+
+    CV_PROP_RW double min_val;
+    CV_PROP_RW double max_val;
+    CV_PROP_RW double step;
+};
+
+inline CvParamGrid::CvParamGrid( double _min_val, double _max_val, double _log_step )
+{
+    min_val = _min_val;
+    max_val = _max_val;
+    step = _log_step;
+}
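
An illustrative sketch, not part of the diff above: how a logarithmic CvParamGrid is meant to be walked, assuming the usual convention that candidate values start at min_val and are multiplied by step on every iteration (whether max_val itself is included is up to the solver that consumes the grid). It assumes old_ml.hpp is on the include path.

    #include <iostream>
    #include "old_ml.hpp"

    int main()
    {
        CvParamGrid Cgrid(0.1, 1000., 10.);     // candidate values: 0.1, 1, 10, 100, ...
        for( double v = Cgrid.min_val; v < Cgrid.max_val; v *= Cgrid.step )
            std::cout << v << std::endl;        // endpoint handling is solver-dependent
        return 0;
    }
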
+
+class CvNormalBayesClassifier : public CvStatModel
+{
+public:
+    CV_WRAP CvNormalBayesClassifier();
+    virtual ~CvNormalBayesClassifier();
+
+    CvNormalBayesClassifier( const CvMat* trainData, const CvMat* responses,
+        const CvMat* varIdx=0, const CvMat* sampleIdx=0 );
+
+    virtual bool train( const CvMat* trainData, const CvMat* responses,
+        const CvMat* varIdx = 0, const CvMat* sampleIdx=0, bool update=false );
+
+    virtual float predict( const CvMat* samples, CV_OUT CvMat* results=0, CV_OUT CvMat* results_prob=0 ) const;
+    CV_WRAP virtual void clear();
+
+    CV_WRAP CvNormalBayesClassifier( const cv::Mat& trainData, const cv::Mat& responses,
+                            const cv::Mat& varIdx=cv::Mat(), const cv::Mat& sampleIdx=cv::Mat() );
+    CV_WRAP virtual bool train( const cv::Mat& trainData, const cv::Mat& responses,
+                       const cv::Mat& varIdx = cv::Mat(), const cv::Mat& sampleIdx=cv::Mat(),
+                       bool update=false );
+    CV_WRAP virtual float predict( const cv::Mat& samples, CV_OUT cv::Mat* results=0, CV_OUT cv::Mat* results_prob=0 ) const;
+
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+    virtual void read( CvFileStorage* storage, CvFileNode* node );
+
+protected:
+    int     var_count, var_all;
+    CvMat*  var_idx;
+    CvMat*  cls_labels;
+    CvMat** count;
+    CvMat** sum;
+    CvMat** productsum;
+    CvMat** avg;
+    CvMat** inv_eigen_values;
+    CvMat** cov_rotate_mats;
+    CvMat*  c;
+};
+
+
+/****************************************************************************************\
+*                          K-Nearest Neighbour Classifier                                *
+\****************************************************************************************/
+
+// k Nearest Neighbors
+class CvKNearest : public CvStatModel
+{
+public:
+
+    CV_WRAP CvKNearest();
+    virtual ~CvKNearest();
+
+    CvKNearest( const CvMat* trainData, const CvMat* responses,
+                const CvMat* sampleIdx=0, bool isRegression=false, int max_k=32 );
+
+    virtual bool train( const CvMat* trainData, const CvMat* responses,
+                        const CvMat* sampleIdx=0, bool is_regression=false,
+                        int maxK=32, bool updateBase=false );
+
+    virtual float find_nearest( const CvMat* samples, int k, CV_OUT CvMat* results=0,
+        const float** neighbors=0, CV_OUT CvMat* neighborResponses=0, CV_OUT CvMat* dist=0 ) const;
+
+    CV_WRAP CvKNearest( const cv::Mat& trainData, const cv::Mat& responses,
+               const cv::Mat& sampleIdx=cv::Mat(), bool isRegression=false, int max_k=32 );
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, const cv::Mat& responses,
+                       const cv::Mat& sampleIdx=cv::Mat(), bool isRegression=false,
+                       int maxK=32, bool updateBase=false );
+
+    virtual float find_nearest( const cv::Mat& samples, int k, cv::Mat* results=0,
+                                const float** neighbors=0, cv::Mat* neighborResponses=0,
+                                cv::Mat* dist=0 ) const;
+    CV_WRAP virtual float find_nearest( const cv::Mat& samples, int k, CV_OUT cv::Mat& results,
+                                        CV_OUT cv::Mat& neighborResponses, CV_OUT cv::Mat& dists) const;
+
+    virtual void clear();
+    int get_max_k() const;
+    int get_var_count() const;
+    int get_sample_count() const;
+    bool is_regression() const;
+
+    virtual float write_results( int k, int k1, int start, int end,
+        const float* neighbor_responses, const float* dist, CvMat* _results,
+        CvMat* _neighbor_responses, CvMat* _dist, Cv32suf* sort_buf ) const;
+
+    virtual void find_neighbors_direct( const CvMat* _samples, int k, int start, int end,
+        float* neighbor_responses, const float** neighbors, float* dist ) const;
+
+protected:
+
+    int max_k, var_count;
+    int total;
+    bool regression;
+    CvVectors* samples;
+};
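
An illustrative sketch, not part of the diff above: training CvKNearest on a toy 2-D set and querying it through the cv::Mat overloads declared in this class (assumes old_ml.hpp is on the include path; the data values are made up for illustration).

    #include "old_ml.hpp"

    int main()
    {
        // four 2-D samples, two classes (labels 0 and 1)
        float pts[]    = { 0.f, 0.f,  0.1f, 0.2f,  5.f, 5.f,  5.2f, 4.9f };
        float labels[] = { 0.f, 0.f, 1.f, 1.f };
        cv::Mat trainData(4, 2, CV_32F, pts), responses(4, 1, CV_32F, labels);

        CvKNearest knn;
        knn.train(trainData, responses, cv::Mat(), /*isRegression=*/false, /*maxK=*/3);

        cv::Mat query = (cv::Mat_<float>(1, 2) << 4.8f, 5.1f), results;
        float predicted = knn.find_nearest(query, 3, &results);   // 3 nearest neighbours vote
        return predicted == 1.f ? 0 : 1;
    }
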
+
+/****************************************************************************************\
+*                                   Support Vector Machines                              *
+\****************************************************************************************/
+
+// SVM training parameters
+struct CvSVMParams
+{
+    CvSVMParams();
+    CvSVMParams( int svm_type, int kernel_type,
+                 double degree, double gamma, double coef0,
+                 double Cvalue, double nu, double p,
+                 CvMat* class_weights, CvTermCriteria term_crit );
+
+    CV_PROP_RW int         svm_type;
+    CV_PROP_RW int         kernel_type;
+    CV_PROP_RW double      degree; // for poly
+    CV_PROP_RW double      gamma;  // for poly/rbf/sigmoid/chi2
+    CV_PROP_RW double      coef0;  // for poly/sigmoid
+
+    CV_PROP_RW double      C;  // for CV_SVM_C_SVC, CV_SVM_EPS_SVR and CV_SVM_NU_SVR
+    CV_PROP_RW double      nu; // for CV_SVM_NU_SVC, CV_SVM_ONE_CLASS, and CV_SVM_NU_SVR
+    CV_PROP_RW double      p; // for CV_SVM_EPS_SVR
+    CvMat*      class_weights; // for CV_SVM_C_SVC
+    CV_PROP_RW CvTermCriteria term_crit; // termination criteria
+};
+
+
+struct CvSVMKernel
+{
+    typedef void (CvSVMKernel::*Calc)( int vec_count, int vec_size, const float** vecs,
+                                       const float* another, float* results );
+    CvSVMKernel();
+    CvSVMKernel( const CvSVMParams* params, Calc _calc_func );
+    virtual bool create( const CvSVMParams* params, Calc _calc_func );
+    virtual ~CvSVMKernel();
+
+    virtual void clear();
+    virtual void calc( int vcount, int n, const float** vecs, const float* another, float* results );
+
+    const CvSVMParams* params;
+    Calc calc_func;
+
+    virtual void calc_non_rbf_base( int vec_count, int vec_size, const float** vecs,
+                                    const float* another, float* results,
+                                    double alpha, double beta );
+    virtual void calc_intersec( int vcount, int var_count, const float** vecs,
+                            const float* another, float* results );
+    virtual void calc_chi2( int vec_count, int vec_size, const float** vecs,
+                              const float* another, float* results );
+    virtual void calc_linear( int vec_count, int vec_size, const float** vecs,
+                              const float* another, float* results );
+    virtual void calc_rbf( int vec_count, int vec_size, const float** vecs,
+                           const float* another, float* results );
+    virtual void calc_poly( int vec_count, int vec_size, const float** vecs,
+                            const float* another, float* results );
+    virtual void calc_sigmoid( int vec_count, int vec_size, const float** vecs,
+                               const float* another, float* results );
+};
+
+
+struct CvSVMKernelRow
+{
+    CvSVMKernelRow* prev;
+    CvSVMKernelRow* next;
+    float* data;
+};
+
+
+struct CvSVMSolutionInfo
+{
+    double obj;
+    double rho;
+    double upper_bound_p;
+    double upper_bound_n;
+    double r;   // for Solver_NU
+};
+
+class CvSVMSolver
+{
+public:
+    typedef bool (CvSVMSolver::*SelectWorkingSet)( int& i, int& j );
+    typedef float* (CvSVMSolver::*GetRow)( int i, float* row, float* dst, bool existed );
+    typedef void (CvSVMSolver::*CalcRho)( double& rho, double& r );
+
+    CvSVMSolver();
+
+    CvSVMSolver( int count, int var_count, const float** samples, schar* y,
+                 int alpha_count, double* alpha, double Cp, double Cn,
+                 CvMemStorage* storage, CvSVMKernel* kernel, GetRow get_row,
+                 SelectWorkingSet select_working_set, CalcRho calc_rho );
+    virtual bool create( int count, int var_count, const float** samples, schar* y,
+                 int alpha_count, double* alpha, double Cp, double Cn,
+                 CvMemStorage* storage, CvSVMKernel* kernel, GetRow get_row,
+                 SelectWorkingSet select_working_set, CalcRho calc_rho );
+    virtual ~CvSVMSolver();
+
+    virtual void clear();
+    virtual bool solve_generic( CvSVMSolutionInfo& si );
+
+    virtual bool solve_c_svc( int count, int var_count, const float** samples, schar* y,
+                              double Cp, double Cn, CvMemStorage* storage,
+                              CvSVMKernel* kernel, double* alpha, CvSVMSolutionInfo& si );
+    virtual bool solve_nu_svc( int count, int var_count, const float** samples, schar* y,
+                               CvMemStorage* storage, CvSVMKernel* kernel,
+                               double* alpha, CvSVMSolutionInfo& si );
+    virtual bool solve_one_class( int count, int var_count, const float** samples,
+                                  CvMemStorage* storage, CvSVMKernel* kernel,
+                                  double* alpha, CvSVMSolutionInfo& si );
+
+    virtual bool solve_eps_svr( int count, int var_count, const float** samples, const float* y,
+                                CvMemStorage* storage, CvSVMKernel* kernel,
+                                double* alpha, CvSVMSolutionInfo& si );
+
+    virtual bool solve_nu_svr( int count, int var_count, const float** samples, const float* y,
+                               CvMemStorage* storage, CvSVMKernel* kernel,
+                               double* alpha, CvSVMSolutionInfo& si );
+
+    virtual float* get_row_base( int i, bool* _existed );
+    virtual float* get_row( int i, float* dst );
+
+    int sample_count;
+    int var_count;
+    int cache_size;
+    int cache_line_size;
+    const float** samples;
+    const CvSVMParams* params;
+    CvMemStorage* storage;
+    CvSVMKernelRow lru_list;
+    CvSVMKernelRow* rows;
+
+    int alpha_count;
+
+    double* G;
+    double* alpha;
+
+    // -1 - lower bound, 0 - free, 1 - upper bound
+    schar* alpha_status;
+
+    schar* y;
+    double* b;
+    float* buf[2];
+    double eps;
+    int max_iter;
+    double C[2];  // C[0] == Cn, C[1] == Cp
+    CvSVMKernel* kernel;
+
+    SelectWorkingSet select_working_set_func;
+    CalcRho calc_rho_func;
+    GetRow get_row_func;
+
+    virtual bool select_working_set( int& i, int& j );
+    virtual bool select_working_set_nu_svm( int& i, int& j );
+    virtual void calc_rho( double& rho, double& r );
+    virtual void calc_rho_nu_svm( double& rho, double& r );
+
+    virtual float* get_row_svc( int i, float* row, float* dst, bool existed );
+    virtual float* get_row_one_class( int i, float* row, float* dst, bool existed );
+    virtual float* get_row_svr( int i, float* row, float* dst, bool existed );
+};
+
+
+struct CvSVMDecisionFunc
+{
+    double rho;
+    int sv_count;
+    double* alpha;
+    int* sv_index;
+};
+
+
+// SVM model
+class CvSVM : public CvStatModel
+{
+public:
+    // SVM type
+    enum { C_SVC=100, NU_SVC=101, ONE_CLASS=102, EPS_SVR=103, NU_SVR=104 };
+
+    // SVM kernel type
+    enum { LINEAR=0, POLY=1, RBF=2, SIGMOID=3, CHI2=4, INTER=5 };
+
+    // SVM params type
+    enum { C=0, GAMMA=1, P=2, NU=3, COEF=4, DEGREE=5 };
+
+    CV_WRAP CvSVM();
+    virtual ~CvSVM();
+
+    CvSVM( const CvMat* trainData, const CvMat* responses,
+           const CvMat* varIdx=0, const CvMat* sampleIdx=0,
+           CvSVMParams params=CvSVMParams() );
+
+    virtual bool train( const CvMat* trainData, const CvMat* responses,
+                        const CvMat* varIdx=0, const CvMat* sampleIdx=0,
+                        CvSVMParams params=CvSVMParams() );
+
+    virtual bool train_auto( const CvMat* trainData, const CvMat* responses,
+        const CvMat* varIdx, const CvMat* sampleIdx, CvSVMParams params,
+        int kfold = 10,
+        CvParamGrid Cgrid      = get_default_grid(CvSVM::C),
+        CvParamGrid gammaGrid  = get_default_grid(CvSVM::GAMMA),
+        CvParamGrid pGrid      = get_default_grid(CvSVM::P),
+        CvParamGrid nuGrid     = get_default_grid(CvSVM::NU),
+        CvParamGrid coeffGrid  = get_default_grid(CvSVM::COEF),
+        CvParamGrid degreeGrid = get_default_grid(CvSVM::DEGREE),
+        bool balanced=false );
+
+    virtual float predict( const CvMat* sample, bool returnDFVal=false ) const;
+    virtual float predict( const CvMat* samples, CV_OUT CvMat* results, bool returnDFVal=false ) const;
+
+    CV_WRAP CvSVM( const cv::Mat& trainData, const cv::Mat& responses,
+          const cv::Mat& varIdx=cv::Mat(), const cv::Mat& sampleIdx=cv::Mat(),
+          CvSVMParams params=CvSVMParams() );
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, const cv::Mat& responses,
+                       const cv::Mat& varIdx=cv::Mat(), const cv::Mat& sampleIdx=cv::Mat(),
+                       CvSVMParams params=CvSVMParams() );
+
+    CV_WRAP virtual bool train_auto( const cv::Mat& trainData, const cv::Mat& responses,
+                            const cv::Mat& varIdx, const cv::Mat& sampleIdx, CvSVMParams params,
+                            int k_fold = 10,
+                            CvParamGrid Cgrid      = CvSVM::get_default_grid(CvSVM::C),
+                            CvParamGrid gammaGrid  = CvSVM::get_default_grid(CvSVM::GAMMA),
+                            CvParamGrid pGrid      = CvSVM::get_default_grid(CvSVM::P),
+                            CvParamGrid nuGrid     = CvSVM::get_default_grid(CvSVM::NU),
+                            CvParamGrid coeffGrid  = CvSVM::get_default_grid(CvSVM::COEF),
+                            CvParamGrid degreeGrid = CvSVM::get_default_grid(CvSVM::DEGREE),
+                            bool balanced=false);
+    CV_WRAP virtual float predict( const cv::Mat& sample, bool returnDFVal=false ) const;
+    CV_WRAP_AS(predict_all) virtual void predict( cv::InputArray samples, cv::OutputArray results ) const;
+
+    CV_WRAP virtual int get_support_vector_count() const;
+    virtual const float* get_support_vector(int i) const;
+    virtual CvSVMParams get_params() const { return params; }
+    CV_WRAP virtual void clear();
+
+    virtual const CvSVMDecisionFunc* get_decision_function() const { return decision_func; }
+
+    static CvParamGrid get_default_grid( int param_id );
+
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+    virtual void read( CvFileStorage* storage, CvFileNode* node );
+    CV_WRAP int get_var_count() const { return var_idx ? var_idx->cols : var_all; }
+
+protected:
+
+    virtual bool set_params( const CvSVMParams& params );
+    virtual bool train1( int sample_count, int var_count, const float** samples,
+                    const void* responses, double Cp, double Cn,
+                    CvMemStorage* _storage, double* alpha, double& rho );
+    virtual bool do_train( int svm_type, int sample_count, int var_count, const float** samples,
+                    const CvMat* responses, CvMemStorage* _storage, double* alpha );
+    virtual void create_kernel();
+    virtual void create_solver();
+
+    virtual float predict( const float* row_sample, int row_len, bool returnDFVal=false ) const;
+
+    virtual void write_params( CvFileStorage* fs ) const;
+    virtual void read_params( CvFileStorage* fs, CvFileNode* node );
+
+    void optimize_linear_svm();
+
+    CvSVMParams params;
+    CvMat* class_labels;
+    int var_all;
+    float** sv;
+    int sv_total;
+    CvMat* var_idx;
+    CvMat* class_weights;
+    CvSVMDecisionFunc* decision_func;
+    CvMemStorage* storage;
+
+    CvSVMSolver* solver;
+    CvSVMKernel* kernel;
+
+private:
+    CvSVM(const CvSVM&);
+    CvSVM& operator = (const CvSVM&);
+};
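
An illustrative sketch, not part of the diff above: the basic C_SVC workflow with the cv::Mat overloads of CvSVM declared in this class. The parameter values (linear kernel, C = 1, the termination criteria) and the toy data are assumptions chosen only to show the call sequence.

    #include "old_ml.hpp"

    int main()
    {
        float pts[]    = { 0.f, 0.f,  0.1f, 0.2f,  5.f, 5.f,  5.2f, 4.9f };
        float labels[] = { 0.f, 0.f, 1.f, 1.f };
        cv::Mat trainData(4, 2, CV_32F, pts), responses(4, 1, CV_32F, labels);

        CvSVMParams params;
        params.svm_type    = CvSVM::C_SVC;
        params.kernel_type = CvSVM::LINEAR;
        params.C           = 1;
        params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 1000, 1e-6);

        CvSVM svm;
        svm.train(trainData, responses, cv::Mat(), cv::Mat(), params);

        cv::Mat query = (cv::Mat_<float>(1, 2) << 4.8f, 5.1f);
        return svm.predict(query) == 1.f ? 0 : 1;
    }
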
+
+/****************************************************************************************\
+*                              Expectation - Maximization                                *
+\****************************************************************************************/
+namespace cv
+{
+class EM : public Algorithm
+{
+public:
+    // Type of covariation matrices
+    enum {COV_MAT_SPHERICAL=0, COV_MAT_DIAGONAL=1, COV_MAT_GENERIC=2, COV_MAT_DEFAULT=COV_MAT_DIAGONAL};
+
+    // Default parameters
+    enum {DEFAULT_NCLUSTERS=5, DEFAULT_MAX_ITERS=100};
+
+    // The initial step
+    enum {START_E_STEP=1, START_M_STEP=2, START_AUTO_STEP=0};
+
+    CV_WRAP EM(int nclusters=EM::DEFAULT_NCLUSTERS, int covMatType=EM::COV_MAT_DIAGONAL,
+       const TermCriteria& termCrit=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS,
+                                                 EM::DEFAULT_MAX_ITERS, FLT_EPSILON));
+
+    virtual ~EM();
+    CV_WRAP virtual void clear();
+
+    CV_WRAP virtual bool train(InputArray samples,
+                       OutputArray logLikelihoods=noArray(),
+                       OutputArray labels=noArray(),
+                       OutputArray probs=noArray());
+
+    CV_WRAP virtual bool trainE(InputArray samples,
+                        InputArray means0,
+                        InputArray covs0=noArray(),
+                        InputArray weights0=noArray(),
+                        OutputArray logLikelihoods=noArray(),
+                        OutputArray labels=noArray(),
+                        OutputArray probs=noArray());
+
+    CV_WRAP virtual bool trainM(InputArray samples,
+                        InputArray probs0,
+                        OutputArray logLikelihoods=noArray(),
+                        OutputArray labels=noArray(),
+                        OutputArray probs=noArray());
+
+    CV_WRAP Vec2d predict(InputArray sample,
+                OutputArray probs=noArray()) const;
+
+    CV_WRAP bool isTrained() const;
+
+    AlgorithmInfo* info() const;
+    virtual void read(const FileNode& fn);
+
+protected:
+
+    virtual void setTrainData(int startStep, const Mat& samples,
+                              const Mat* probs0,
+                              const Mat* means0,
+                              const std::vector<Mat>* covs0,
+                              const Mat* weights0);
+
+    bool doTrain(int startStep,
+                 OutputArray logLikelihoods,
+                 OutputArray labels,
+                 OutputArray probs);
+    virtual void eStep();
+    virtual void mStep();
+
+    void clusterTrainSamples();
+    void decomposeCovs();
+    void computeLogWeightDivDet();
+
+    Vec2d computeProbabilities(const Mat& sample, Mat* probs) const;
+
+    // all inner matrices have type CV_64FC1
+    CV_PROP_RW int nclusters;
+    CV_PROP_RW int covMatType;
+    CV_PROP_RW int maxIters;
+    CV_PROP_RW double epsilon;
+
+    Mat trainSamples;
+    Mat trainProbs;
+    Mat trainLogLikelihoods;
+    Mat trainLabels;
+
+    CV_PROP Mat weights;
+    CV_PROP Mat means;
+    CV_PROP std::vector<Mat> covs;
+
+    std::vector<Mat> covsEigenValues;
+    std::vector<Mat> covsRotateMats;
+    std::vector<Mat> invCovsEigenValues;
+    Mat logWeightDivDet;
+};
+} // namespace cv
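
An illustrative sketch, not part of the diff above: fitting the cv::EM model declared in this namespace to a small two-mode point set. The interpretation of the returned Vec2d (element 0 = likelihood logarithm, element 1 = index of the most probable component) follows the usual OpenCV convention and is not spelled out in the declaration itself.

    #include "old_ml.hpp"

    int main()
    {
        cv::Mat points(200, 2, CV_32FC1);
        cv::randu(points.rowRange(0, 100),   cv::Scalar(0), cv::Scalar(1));   // first mode
        cv::randu(points.rowRange(100, 200), cv::Scalar(5), cv::Scalar(6));   // second mode

        cv::EM em(2);                         // two Gaussian components, diagonal covariances
        cv::Mat labels;
        em.train(points, cv::noArray(), labels);

        cv::Vec2d r = em.predict(points.row(0));
        return em.isTrained() && (int)r[1] == labels.at<int>(0) ? 0 : 1;
    }
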
+
+/****************************************************************************************\
+*                                      Decision Tree                                     *
+\****************************************************************************************/
+struct CvPair16u32s
+{
+    unsigned short* u;
+    int* i;
+};
+
+
+#define CV_DTREE_CAT_DIR(idx,subset) \
+    (2*((subset[(idx)>>5]&(1 << ((idx) & 31)))==0)-1)
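
An illustrative check, not part of the diff above, of what the macro evaluates to: it yields -1 when the category's bit is set in the split subset and +1 when it is clear (in the tree code -1 conventionally means the left branch).

    #include <assert.h>
    #include "old_ml.hpp"

    int main()
    {
        int subset[2] = { 1 << 3, 0 };                 // only category 3 is in the subset
        assert( CV_DTREE_CAT_DIR(3, subset) == -1 );   // bit set   -> -1 (left)
        assert( CV_DTREE_CAT_DIR(4, subset) == +1 );   // bit clear -> +1 (right)
        return 0;
    }
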
+
+struct CvDTreeSplit
+{
+    int var_idx;
+    int condensed_idx;
+    int inversed;
+    float quality;
+    CvDTreeSplit* next;
+    union
+    {
+        int subset[2];
+        struct
+        {
+            float c;
+            int split_point;
+        }
+        ord;
+    };
+};
+
+struct CvDTreeNode
+{
+    int class_idx;
+    int Tn;
+    double value;
+
+    CvDTreeNode* parent;
+    CvDTreeNode* left;
+    CvDTreeNode* right;
+
+    CvDTreeSplit* split;
+
+    int sample_count;
+    int depth;
+    int* num_valid;
+    int offset;
+    int buf_idx;
+    double maxlr;
+
+    // global pruning data
+    int complexity;
+    double alpha;
+    double node_risk, tree_risk, tree_error;
+
+    // cross-validation pruning data
+    int* cv_Tn;
+    double* cv_node_risk;
+    double* cv_node_error;
+
+    int get_num_valid(int vi) { return num_valid ? num_valid[vi] : sample_count; }
+    void set_num_valid(int vi, int n) { if( num_valid ) num_valid[vi] = n; }
+};
+
+
+struct CvDTreeParams
+{
+    CV_PROP_RW int   max_categories;
+    CV_PROP_RW int   max_depth;
+    CV_PROP_RW int   min_sample_count;
+    CV_PROP_RW int   cv_folds;
+    CV_PROP_RW bool  use_surrogates;
+    CV_PROP_RW bool  use_1se_rule;
+    CV_PROP_RW bool  truncate_pruned_tree;
+    CV_PROP_RW float regression_accuracy;
+    const float* priors;
+
+    CvDTreeParams();
+    CvDTreeParams( int max_depth, int min_sample_count,
+                   float regression_accuracy, bool use_surrogates,
+                   int max_categories, int cv_folds,
+                   bool use_1se_rule, bool truncate_pruned_tree,
+                   const float* priors );
+};
+
+
+struct CvDTreeTrainData
+{
+    CvDTreeTrainData();
+    CvDTreeTrainData( const CvMat* trainData, int tflag,
+                      const CvMat* responses, const CvMat* varIdx=0,
+                      const CvMat* sampleIdx=0, const CvMat* varType=0,
+                      const CvMat* missingDataMask=0,
+                      const CvDTreeParams& params=CvDTreeParams(),
+                      bool _shared=false, bool _add_labels=false );
+    virtual ~CvDTreeTrainData();
+
+    virtual void set_data( const CvMat* trainData, int tflag,
+                          const CvMat* responses, const CvMat* varIdx=0,
+                          const CvMat* sampleIdx=0, const CvMat* varType=0,
+                          const CvMat* missingDataMask=0,
+                          const CvDTreeParams& params=CvDTreeParams(),
+                          bool _shared=false, bool _add_labels=false,
+                          bool _update_data=false );
+    virtual void do_responses_copy();
+
+    virtual void get_vectors( const CvMat* _subsample_idx,
+         float* values, uchar* missing, float* responses, bool get_class_idx=false );
+
+    virtual CvDTreeNode* subsample_data( const CvMat* _subsample_idx );
+
+    virtual void write_params( CvFileStorage* fs ) const;
+    virtual void read_params( CvFileStorage* fs, CvFileNode* node );
+
+    // release all the data
+    virtual void clear();
+
+    int get_num_classes() const;
+    int get_var_type(int vi) const;
+    int get_work_var_count() const {return work_var_count;}
+
+    virtual const float* get_ord_responses( CvDTreeNode* n, float* values_buf, int* sample_indices_buf );
+    virtual const int* get_class_labels( CvDTreeNode* n, int* labels_buf );
+    virtual const int* get_cv_labels( CvDTreeNode* n, int* labels_buf );
+    virtual const int* get_sample_indices( CvDTreeNode* n, int* indices_buf );
+    virtual const int* get_cat_var_data( CvDTreeNode* n, int vi, int* cat_values_buf );
+    virtual void get_ord_var_data( CvDTreeNode* n, int vi, float* ord_values_buf, int* sorted_indices_buf,
+                                   const float** ord_values, const int** sorted_indices, int* sample_indices_buf );
+    virtual int get_child_buf_idx( CvDTreeNode* n );
+
+    ////////////////////////////////////
+
+    virtual bool set_params( const CvDTreeParams& params );
+    virtual CvDTreeNode* new_node( CvDTreeNode* parent, int count,
+                                   int storage_idx, int offset );
+
+    virtual CvDTreeSplit* new_split_ord( int vi, float cmp_val,
+                int split_point, int inversed, float quality );
+    virtual CvDTreeSplit* new_split_cat( int vi, float quality );
+    virtual void free_node_data( CvDTreeNode* node );
+    virtual void free_train_data();
+    virtual void free_node( CvDTreeNode* node );
+
+    int sample_count, var_all, var_count, max_c_count;
+    int ord_var_count, cat_var_count, work_var_count;
+    bool have_labels, have_priors;
+    bool is_classifier;
+    int tflag;
+
+    const CvMat* train_data;
+    const CvMat* responses;
+    CvMat* responses_copy; // used in Boosting
+
+    int buf_count, buf_size; // buf_size is obsolete, please do not use it, use expression ((int64)buf->rows * (int64)buf->cols / buf_count) instead
+    bool shared;
+    int is_buf_16u;
+
+    CvMat* cat_count;
+    CvMat* cat_ofs;
+    CvMat* cat_map;
+
+    CvMat* counts;
+    CvMat* buf;
+    inline size_t get_length_subbuf() const
+    {
+        size_t res = (size_t)(work_var_count + 1) * (size_t)sample_count;
+        return res;
+    }
+
+    CvMat* direction;
+    CvMat* split_buf;
+
+    CvMat* var_idx;
+    CvMat* var_type; // i-th element =
+                     //   k<0  - ordered
+                     //   k>=0 - categorical, see k-th element of cat_* arrays
+    CvMat* priors;
+    CvMat* priors_mult;
+
+    CvDTreeParams params;
+
+    CvMemStorage* tree_storage;
+    CvMemStorage* temp_storage;
+
+    CvDTreeNode* data_root;
+
+    CvSet* node_heap;
+    CvSet* split_heap;
+    CvSet* cv_heap;
+    CvSet* nv_heap;
+
+    cv::RNG* rng;
+};
+
+class CvDTree;
+class CvForestTree;
+
+namespace cv
+{
+    struct DTreeBestSplitFinder;
+    struct ForestTreeBestSplitFinder;
+}
+
+class CvDTree : public CvStatModel
+{
+public:
+    CV_WRAP CvDTree();
+    virtual ~CvDTree();
+
+    virtual bool train( const CvMat* trainData, int tflag,
+                        const CvMat* responses, const CvMat* varIdx=0,
+                        const CvMat* sampleIdx=0, const CvMat* varType=0,
+                        const CvMat* missingDataMask=0,
+                        CvDTreeParams params=CvDTreeParams() );
+
+    virtual bool train( CvMLData* trainData, CvDTreeParams params=CvDTreeParams() );
+
+    // type in {CV_TRAIN_ERROR, CV_TEST_ERROR}
+    virtual float calc_error( CvMLData* trainData, int type, std::vector<float> *resp = 0 );
+
+    virtual bool train( CvDTreeTrainData* trainData, const CvMat* subsampleIdx );
+
+    virtual CvDTreeNode* predict( const CvMat* sample, const CvMat* missingDataMask=0,
+                                  bool preprocessedInput=false ) const;
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
+                       const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat(),
+                       CvDTreeParams params=CvDTreeParams() );
+
+    CV_WRAP virtual CvDTreeNode* predict( const cv::Mat& sample, const cv::Mat& missingDataMask=cv::Mat(),
+                                  bool preprocessedInput=false ) const;
+    CV_WRAP virtual cv::Mat getVarImportance();
+
+    virtual const CvMat* get_var_importance();
+    CV_WRAP virtual void clear();
+
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+    virtual void write( CvFileStorage* fs, const char* name ) const;
+
+    // special read & write methods for trees in the tree ensembles
+    virtual void read( CvFileStorage* fs, CvFileNode* node,
+                       CvDTreeTrainData* data );
+    virtual void write( CvFileStorage* fs ) const;
+
+    const CvDTreeNode* get_root() const;
+    int get_pruned_tree_idx() const;
+    CvDTreeTrainData* get_data();
+
+protected:
+    friend struct cv::DTreeBestSplitFinder;
+
+    virtual bool do_train( const CvMat* _subsample_idx );
+
+    virtual void try_split_node( CvDTreeNode* n );
+    virtual void split_node_data( CvDTreeNode* n );
+    virtual CvDTreeSplit* find_best_split( CvDTreeNode* n );
+    virtual CvDTreeSplit* find_split_ord_class( CvDTreeNode* n, int vi,
+                            float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_class( CvDTreeNode* n, int vi,
+                            float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_ord_reg( CvDTreeNode* n, int vi,
+                            float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_reg( CvDTreeNode* n, int vi,
+                            float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_surrogate_split_ord( CvDTreeNode* n, int vi, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_surrogate_split_cat( CvDTreeNode* n, int vi, uchar* ext_buf = 0 );
+    virtual double calc_node_dir( CvDTreeNode* node );
+    virtual void complete_node_dir( CvDTreeNode* node );
+    virtual void cluster_categories( const int* vectors, int vector_count,
+        int var_count, int* sums, int k, int* cluster_labels );
+
+    virtual void calc_node_value( CvDTreeNode* node );
+
+    virtual void prune_cv();
+    virtual double update_tree_rnc( int T, int fold );
+    virtual int cut_tree( int T, int fold, double min_alpha );
+    virtual void free_prune_data(bool cut_tree);
+    virtual void free_tree();
+
+    virtual void write_node( CvFileStorage* fs, CvDTreeNode* node ) const;
+    virtual void write_split( CvFileStorage* fs, CvDTreeSplit* split ) const;
+    virtual CvDTreeNode* read_node( CvFileStorage* fs, CvFileNode* node, CvDTreeNode* parent );
+    virtual CvDTreeSplit* read_split( CvFileStorage* fs, CvFileNode* node );
+    virtual void write_tree_nodes( CvFileStorage* fs ) const;
+    virtual void read_tree_nodes( CvFileStorage* fs, CvFileNode* node );
+
+    CvDTreeNode* root;
+    CvMat* var_importance;
+    CvDTreeTrainData* data;
+    CvMat train_data_hdr, responses_hdr;
+    cv::Mat train_data_mat, responses_mat;
+
+public:
+    int pruned_tree_idx;
+};
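
An illustrative sketch, not part of the diff above: training the CvDTree declared here as a classifier. The var_type vector has one entry per input variable plus one for the response; marking the response CV_VAR_CATEGORICAL selects classification. The relaxed CvDTreeParams values are an assumption needed only because the toy set is tiny.

    #include "old_ml.hpp"

    int main()
    {
        float pts[]    = { 0.f, 0.f,  0.1f, 0.2f,  5.f, 5.f,  5.2f, 4.9f };
        float labels[] = { 0.f, 0.f, 1.f, 1.f };
        cv::Mat trainData(4, 2, CV_32F, pts), responses(4, 1, CV_32F, labels);

        cv::Mat varType(3, 1, CV_8U, cv::Scalar(CV_VAR_NUMERICAL));
        varType.at<uchar>(2) = CV_VAR_CATEGORICAL;     // the response is a class label

        // max_depth=5, min_sample_count=1, no surrogates, no cross-validation pruning
        CvDTreeParams params(5, 1, 0, false, 10, 0, false, false, 0);

        CvDTree tree;
        tree.train(trainData, CV_ROW_SAMPLE, responses, cv::Mat(), cv::Mat(),
                   varType, cv::Mat(), params);

        cv::Mat query = (cv::Mat_<float>(1, 2) << 4.8f, 5.1f);
        CvDTreeNode* leaf = tree.predict(query);       // value holds the predicted class label
        return leaf && leaf->value == 1. ? 0 : 1;
    }
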
+
+
+/****************************************************************************************\
+*                                   Random Trees Classifier                              *
+\****************************************************************************************/
+
+class CvRTrees;
+
+class CvForestTree: public CvDTree
+{
+public:
+    CvForestTree();
+    virtual ~CvForestTree();
+
+    virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx, CvRTrees* forest );
+
+    virtual int get_var_count() const {return data ? data->var_count : 0;}
+    virtual void read( CvFileStorage* fs, CvFileNode* node, CvRTrees* forest, CvDTreeTrainData* _data );
+
+    /* dummy methods to avoid warnings: BEGIN */
+    virtual bool train( const CvMat* trainData, int tflag,
+                        const CvMat* responses, const CvMat* varIdx=0,
+                        const CvMat* sampleIdx=0, const CvMat* varType=0,
+                        const CvMat* missingDataMask=0,
+                        CvDTreeParams params=CvDTreeParams() );
+
+    virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx );
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+    virtual void read( CvFileStorage* fs, CvFileNode* node,
+                       CvDTreeTrainData* data );
+    /* dummy methods to avoid warnings: END */
+
+protected:
+    friend struct cv::ForestTreeBestSplitFinder;
+
+    virtual CvDTreeSplit* find_best_split( CvDTreeNode* n );
+    CvRTrees* forest;
+};
+
+
+struct CvRTParams : public CvDTreeParams
+{
+    //Parameters for the forest
+    CV_PROP_RW bool calc_var_importance; // true <=> RF processes variable importance
+    CV_PROP_RW int nactive_vars;
+    CV_PROP_RW CvTermCriteria term_crit;
+
+    CvRTParams();
+    CvRTParams( int max_depth, int min_sample_count,
+                float regression_accuracy, bool use_surrogates,
+                int max_categories, const float* priors, bool calc_var_importance,
+                int nactive_vars, int max_num_of_trees_in_the_forest,
+                float forest_accuracy, int termcrit_type );
+};
+
+
+class CvRTrees : public CvStatModel
+{
+public:
+    CV_WRAP CvRTrees();
+    virtual ~CvRTrees();
+    virtual bool train( const CvMat* trainData, int tflag,
+                        const CvMat* responses, const CvMat* varIdx=0,
+                        const CvMat* sampleIdx=0, const CvMat* varType=0,
+                        const CvMat* missingDataMask=0,
+                        CvRTParams params=CvRTParams() );
+
+    virtual bool train( CvMLData* data, CvRTParams params=CvRTParams() );
+    virtual float predict( const CvMat* sample, const CvMat* missing = 0 ) const;
+    virtual float predict_prob( const CvMat* sample, const CvMat* missing = 0 ) const;
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
+                       const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat(),
+                       CvRTParams params=CvRTParams() );
+    CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing = cv::Mat() ) const;
+    CV_WRAP virtual float predict_prob( const cv::Mat& sample, const cv::Mat& missing = cv::Mat() ) const;
+    CV_WRAP virtual cv::Mat getVarImportance();
+
+    CV_WRAP virtual void clear();
+
+    virtual const CvMat* get_var_importance();
+    virtual float get_proximity( const CvMat* sample1, const CvMat* sample2,
+        const CvMat* missing1 = 0, const CvMat* missing2 = 0 ) const;
+
+    virtual float calc_error( CvMLData* data, int type , std::vector<float>* resp = 0 ); // type in {CV_TRAIN_ERROR, CV_TEST_ERROR}
+
+    virtual float get_train_error();
+
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+    virtual void write( CvFileStorage* fs, const char* name ) const;
+
+    CvMat* get_active_var_mask();
+    CvRNG* get_rng();
+
+    int get_tree_count() const;
+    CvForestTree* get_tree(int i) const;
+
+protected:
+    virtual cv::String getName() const;
+
+    virtual bool grow_forest( const CvTermCriteria term_crit );
+
+    // array of the trees of the forest
+    CvForestTree** trees;
+    CvDTreeTrainData* data;
+    CvMat train_data_hdr, responses_hdr;
+    cv::Mat train_data_mat, responses_mat;
+    int ntrees;
+    int nclasses;
+    double oob_error;
+    CvMat* var_importance;
+    int nsamples;
+
+    cv::RNG* rng;
+    CvMat* active_var_mask;
+};
+
+/****************************************************************************************\
+*                           Extremely randomized trees Classifier                        *
+\****************************************************************************************/
+struct CvERTreeTrainData : public CvDTreeTrainData
+{
+    virtual void set_data( const CvMat* trainData, int tflag,
+                          const CvMat* responses, const CvMat* varIdx=0,
+                          const CvMat* sampleIdx=0, const CvMat* varType=0,
+                          const CvMat* missingDataMask=0,
+                          const CvDTreeParams& params=CvDTreeParams(),
+                          bool _shared=false, bool _add_labels=false,
+                          bool _update_data=false );
+    virtual void get_ord_var_data( CvDTreeNode* n, int vi, float* ord_values_buf, int* missing_buf,
+                                   const float** ord_values, const int** missing, int* sample_buf = 0 );
+    virtual const int* get_sample_indices( CvDTreeNode* n, int* indices_buf );
+    virtual const int* get_cv_labels( CvDTreeNode* n, int* labels_buf );
+    virtual const int* get_cat_var_data( CvDTreeNode* n, int vi, int* cat_values_buf );
+    virtual void get_vectors( const CvMat* _subsample_idx, float* values, uchar* missing,
+                              float* responses, bool get_class_idx=false );
+    virtual CvDTreeNode* subsample_data( const CvMat* _subsample_idx );
+    const CvMat* missing_mask;
+};
+
+class CvForestERTree : public CvForestTree
+{
+protected:
+    virtual double calc_node_dir( CvDTreeNode* node );
+    virtual CvDTreeSplit* find_split_ord_class( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_class( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_ord_reg( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_reg( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual void split_node_data( CvDTreeNode* n );
+};
+
+class CvERTrees : public CvRTrees
+{
+public:
+    CV_WRAP CvERTrees();
+    virtual ~CvERTrees();
+    virtual bool train( const CvMat* trainData, int tflag,
+                        const CvMat* responses, const CvMat* varIdx=0,
+                        const CvMat* sampleIdx=0, const CvMat* varType=0,
+                        const CvMat* missingDataMask=0,
+                        CvRTParams params=CvRTParams());
+    CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
+                       const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat(),
+                       CvRTParams params=CvRTParams());
+    virtual bool train( CvMLData* data, CvRTParams params=CvRTParams() );
+protected:
+    virtual cv::String getName() const;
+    virtual bool grow_forest( const CvTermCriteria term_crit );
+};
+
+
+/****************************************************************************************\
+*                                   Boosted tree classifier                              *
+\****************************************************************************************/
+
+struct CvBoostParams : public CvDTreeParams
+{
+    CV_PROP_RW int boost_type;
+    CV_PROP_RW int weak_count;
+    CV_PROP_RW int split_criteria;
+    CV_PROP_RW double weight_trim_rate;
+
+    CvBoostParams();
+    CvBoostParams( int boost_type, int weak_count, double weight_trim_rate,
+                   int max_depth, bool use_surrogates, const float* priors );
+};
+
+
+class CvBoost;
+
+class CvBoostTree: public CvDTree
+{
+public:
+    CvBoostTree();
+    virtual ~CvBoostTree();
+
+    virtual bool train( CvDTreeTrainData* trainData,
+                        const CvMat* subsample_idx, CvBoost* ensemble );
+
+    virtual void scale( double s );
+    virtual void read( CvFileStorage* fs, CvFileNode* node,
+                       CvBoost* ensemble, CvDTreeTrainData* _data );
+    virtual void clear();
+
+    /* dummy methods to avoid warnings: BEGIN */
+    virtual bool train( const CvMat* trainData, int tflag,
+                        const CvMat* responses, const CvMat* varIdx=0,
+                        const CvMat* sampleIdx=0, const CvMat* varType=0,
+                        const CvMat* missingDataMask=0,
+                        CvDTreeParams params=CvDTreeParams() );
+    virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx );
+
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+    virtual void read( CvFileStorage* fs, CvFileNode* node,
+                       CvDTreeTrainData* data );
+    /* dummy methods to avoid warnings: END */
+
+protected:
+
+    virtual void try_split_node( CvDTreeNode* n );
+    virtual CvDTreeSplit* find_surrogate_split_ord( CvDTreeNode* n, int vi, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_surrogate_split_cat( CvDTreeNode* n, int vi, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_ord_class( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_class( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_ord_reg( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual CvDTreeSplit* find_split_cat_reg( CvDTreeNode* n, int vi,
+        float init_quality = 0, CvDTreeSplit* _split = 0, uchar* ext_buf = 0 );
+    virtual void calc_node_value( CvDTreeNode* n );
+    virtual double calc_node_dir( CvDTreeNode* n );
+
+    CvBoost* ensemble;
+};
+
+
+class CvBoost : public CvStatModel
+{
+public:
+    // Boosting type
+    enum { DISCRETE=0, REAL=1, LOGIT=2, GENTLE=3 };
+
+    // Splitting criteria
+    enum { DEFAULT=0, GINI=1, MISCLASS=3, SQERR=4 };
+
+    CV_WRAP CvBoost();
+    virtual ~CvBoost();
+
+    CvBoost( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvBoostParams params=CvBoostParams() );
+
+    virtual bool train( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvBoostParams params=CvBoostParams(),
+             bool update=false );
+
+    virtual bool train( CvMLData* data,
+             CvBoostParams params=CvBoostParams(),
+             bool update=false );
+
+    virtual float predict( const CvMat* sample, const CvMat* missing=0,
+                           CvMat* weak_responses=0, CvSlice slice=CV_WHOLE_SEQ,
+                           bool raw_mode=false, bool return_sum=false ) const;
+
+    CV_WRAP CvBoost( const cv::Mat& trainData, int tflag,
+            const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+            const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+            const cv::Mat& missingDataMask=cv::Mat(),
+            CvBoostParams params=CvBoostParams() );
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
+                       const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat(),
+                       CvBoostParams params=CvBoostParams(),
+                       bool update=false );
+
+    CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing=cv::Mat(),
+                                   const cv::Range& slice=cv::Range::all(), bool rawMode=false,
+                                   bool returnSum=false ) const;
+
+    virtual float calc_error( CvMLData* _data, int type , std::vector<float> *resp = 0 ); // type in {CV_TRAIN_ERROR, CV_TEST_ERROR}
+
+    CV_WRAP virtual void prune( CvSlice slice );
+
+    CV_WRAP virtual void clear();
+
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+    virtual void read( CvFileStorage* storage, CvFileNode* node );
+    virtual const CvMat* get_active_vars(bool absolute_idx=true);
+
+    CvSeq* get_weak_predictors();
+
+    CvMat* get_weights();
+    CvMat* get_subtree_weights();
+    CvMat* get_weak_response();
+    const CvBoostParams& get_params() const;
+    const CvDTreeTrainData* get_data() const;
+
+protected:
+
+    virtual bool set_params( const CvBoostParams& params );
+    virtual void update_weights( CvBoostTree* tree );
+    virtual void trim_weights();
+    virtual void write_params( CvFileStorage* fs ) const;
+    virtual void read_params( CvFileStorage* fs, CvFileNode* node );
+
+    virtual void initialize_weights(double (&p)[2]);
+
+    CvDTreeTrainData* data;
+    CvMat train_data_hdr, responses_hdr;
+    cv::Mat train_data_mat, responses_mat;
+    CvBoostParams params;
+    CvSeq* weak;
+
+    CvMat* active_vars;
+    CvMat* active_vars_abs;
+    bool have_active_cat_vars;
+
+    CvMat* orig_response;
+    CvMat* sum_response;
+    CvMat* weak_eval;
+    CvMat* subsample_mask;
+    CvMat* weights;
+    CvMat* subtree_weights;
+    bool have_subsample;
+};
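
An illustrative sketch, not part of the diff above: the plain two-class CvBoost workflow that CvCascadeBoost in apps/traincascade builds on. Data layout and var_type follow the same conventions as the decision-tree sketch earlier; the tiny sample set and the chosen parameters are assumptions for illustration only.

    #include "old_ml.hpp"

    int main()
    {
        float pts[]    = { 0.f, 0.f,  0.1f, 0.2f,  5.f, 5.f,  5.2f, 4.9f };
        float labels[] = { 0.f, 0.f, 1.f, 1.f };
        cv::Mat trainData(4, 2, CV_32F, pts), responses(4, 1, CV_32F, labels);

        cv::Mat varType(3, 1, CV_8U, cv::Scalar(CV_VAR_NUMERICAL));
        varType.at<uchar>(2) = CV_VAR_CATEGORICAL;     // boosting needs a categorical response

        // gentle AdaBoost, 50 weak stumps (max_depth=1), weight trim rate 0.95
        CvBoostParams bp(CvBoost::GENTLE, 50, 0.95, 1, false, 0);
        bp.min_sample_count = 1;                       // only needed for this toy set

        CvBoost boost;
        boost.train(trainData, CV_ROW_SAMPLE, responses, cv::Mat(), cv::Mat(),
                    varType, cv::Mat(), bp);

        cv::Mat query = (cv::Mat_<float>(1, 2) << 4.8f, 5.1f);
        return boost.predict(query) == 1.f ? 0 : 1;
    }
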
+
+
+/****************************************************************************************\
+*                                   Gradient Boosted Trees                               *
+\****************************************************************************************/
+
+// DataType: STRUCT CvGBTreesParams
+// Parameters of GBT (Gradient Boosted trees model), including single
+// tree settings and ensemble parameters.
+//
+// weak_count          - count of trees in the ensemble
+// loss_function_type  - loss function used for ensemble training
+// subsample_portion   - portion of the whole training set used for
+//                       training every single tree.
+//                       subsample_portion value is in (0.0, 1.0].
+//                       subsample_portion == 1.0 means the whole dataset is
+//                       used on each step. The count of samples used on each
+//                       step is computed as
+//                       int(total_samples_count * subsample_portion).
+// shrinkage           - regularization parameter.
+//                       Each tree prediction is multiplied by the shrinkage value.
+
+
+struct CvGBTreesParams : public CvDTreeParams
+{
+    CV_PROP_RW int weak_count;
+    CV_PROP_RW int loss_function_type;
+    CV_PROP_RW float subsample_portion;
+    CV_PROP_RW float shrinkage;
+
+    CvGBTreesParams();
+    CvGBTreesParams( int loss_function_type, int weak_count, float shrinkage,
+        float subsample_portion, int max_depth, bool use_surrogates );
+};
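
An illustrative sketch, not part of the diff above: filling CvGBTreesParams by hand, with the subsample arithmetic from the parameter description above worked through for a hypothetical 1000-sample training set. The specific values are assumptions, not recommended defaults.

    #include "old_ml.hpp"

    int main()
    {
        CvGBTreesParams gbp;                                // defaults from CvGBTreesParams()
        gbp.loss_function_type = CvGBTrees::SQUARED_LOSS;   // regression loss (declared below)
        gbp.weak_count         = 200;
        gbp.subsample_portion  = 0.7f;  // 1000 training samples -> int(1000 * 0.7) = 700 per step
        gbp.shrinkage          = 0.05f; // every tree's prediction is scaled by 0.05 before summing
        return gbp.subsample_portion > 0.f && gbp.subsample_portion <= 1.f ? 0 : 1;
    }
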
+
+// DataType: CLASS CvGBTrees
+// Gradient Boosting Trees (GBT) algorithm implementation.
+//
+// data             - training dataset
+// params           - parameters of the CvGBTrees
+// weak             - array[0..(class_count-1)] of CvSeq
+//                    for storing tree ensembles
+// orig_response    - original responses of the training set samples
+// sum_response     - predictions of the current model on the training dataset.
+//                    This matrix is updated on every iteration.
+// sum_response_tmp - predictions of the model on the training set on the next
+//                    step. On every iteration values of sum_responses_tmp are
+//                    computed via sum_responses values. When the current
+//                    step is complete sum_response values become equal to
+//                    sum_responses_tmp.
+// sampleIdx       - indices of samples used for training the ensemble.
+//                    CvGBTrees training procedure takes a set of samples
+//                    (train_data) and a set of responses (responses).
+//                    Only pairs (train_data[i], responses[i]), where i is
+//                    in sample_idx are used for training the ensemble.
+// subsample_train  - indices of samples used for training a single decision
+//                    tree on the current step. These indices are counted
+//                    relative to sample_idx, so that pairs
+//                    (train_data[sample_idx[i]], responses[sample_idx[i]])
+//                    are used for training a decision tree.
+//                    The training set is randomly split
+//                    into two parts (subsample_train and subsample_test)
+//                    on every iteration according to the portion parameter.
+// subsample_test   - relative indices of samples from the training set,
+//                    which are not used for training a tree on the current
+//                    step.
+// missing          - mask of the missing values in the training set. This
+//                    matrix has the same size as train_data. 1 - missing
+//                    value, 0 - not a missing value.
+// class_labels     - output class labels map.
+// rng              - random number generator. Used for splitting the
+//                    training set.
+// class_count      - count of output classes.
+//                    class_count == 1 in the case of regression,
+//                    and > 1 in the case of classification.
+// delta            - Huber loss function parameter.
+// base_value       - start point of the gradient descent procedure.
+//                    model prediction is
+//                    f(x) = f_0 + sum_{i=1..weak_count-1}(f_i(x)), where
+//                    f_0 is the base value.
+
+
+
+class CvGBTrees : public CvStatModel
+{
+public:
+
+    /*
+    // DataType: ENUM
+    // Loss functions implemented in CvGBTrees.
+    //
+    // SQUARED_LOSS
+    // problem: regression
+    // loss = (x - x')^2
+    //
+    // ABSOLUTE_LOSS
+    // problem: regression
+    // loss = abs(x - x')
+    //
+    // HUBER_LOSS
+    // problem: regression
+    // loss = delta*( abs(x - x') - delta/2), if abs(x - x') > delta
+    //           1/2*(x - x')^2, if abs(x - x') <= delta,
+    //           where delta is the alpha-quantile of pseudo responses from
+    //           the training set.
+    //
+    // DEVIANCE_LOSS
+    // problem: classification
+    //
+    */
+    enum {SQUARED_LOSS=0, ABSOLUTE_LOSS, HUBER_LOSS=3, DEVIANCE_LOSS};
+
+
+    /*
+    // Default constructor. Creates a model only (without training).
+    // Should be followed by one form of the train(...) function.
+    //
+    // API
+    // CvGBTrees();
+
+    // INPUT
+    // OUTPUT
+    // RESULT
+    */
+    CV_WRAP CvGBTrees();
+
+
+    /*
+    // Full form constructor. Creates a gradient boosting model and trains it.
+    //
+    // API
+    // CvGBTrees( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvGBTreesParams params=CvGBTreesParams() );
+
+    // INPUT
+    // trainData    - a set of input feature vectors.
+    //                  size of matrix is
+    //                  <count of samples> x <variables count>
+    //                  or <variables count> x <count of samples>
+    //                  depending on the tflag parameter.
+    //                  matrix values are float.
+    // tflag         - a flag showing how the samples are stored in the
+    //                  trainData matrix: row by row (tflag=CV_ROW_SAMPLE)
+    //                  or column by column (tflag=CV_COL_SAMPLE).
+    // responses     - a vector of responses corresponding to the samples
+    //                  in trainData.
+    // varIdx       - indices of used variables. zero value means that all
+    //                  variables are active.
+    // sampleIdx    - indices of used samples. zero value means that all
+    //                  samples from trainData are in the training set.
+    // varType      - vector of <variables count> length. gives every
+    //                  variable type CV_VAR_CATEGORICAL or CV_VAR_ORDERED.
+    //                  varType = 0 means all variables are numerical.
+    // missingDataMask  - a mask of missing values in trainData.
+    //                  missingDataMask = 0 means that there are no missing
+    //                  values.
+    // params         - parameters of GTB algorithm.
+    // OUTPUT
+    // RESULT
+    */
+    CvGBTrees( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvGBTreesParams params=CvGBTreesParams() );
+
+
+    /*
+    // Destructor.
+    */
+    virtual ~CvGBTrees();
+
+
+    /*
+    // Gradient tree boosting model training
+    //
+    // API
+    // virtual bool train( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvGBTreesParams params=CvGBTreesParams(),
+             bool update=false );
+
+    // INPUT
+    // trainData    - a set of input feature vectors.
+    //                  size of matrix is
+    //                  <count of samples> x <variables count>
+    //                  or <variables count> x <count of samples>
+    //                  depending on the tflag parameter.
+    //                  matrix values are float.
+    // tflag         - a flag showing how the samples are stored in the
+    //                  trainData matrix: row by row (tflag=CV_ROW_SAMPLE)
+    //                  or column by column (tflag=CV_COL_SAMPLE).
+    // responses     - a vector of responses corresponding to the samples
+    //                  in trainData.
+    // varIdx       - indices of used variables. zero value means that all
+    //                  variables are active.
+    // sampleIdx    - indices of used samples. zero value means that all
+    //                  samples from trainData are in the training set.
+    // varType      - vector of <variables count> length. gives every
+    //                  variable type CV_VAR_CATEGORICAL or CV_VAR_ORDERED.
+    //                  varType = 0 means all variables are numerical.
+    // missingDataMask  - a mask of missing values in trainData.
+    //                  missingDataMask = 0 means that there are no missing
+    //                  values.
+    // params         - parameters of GTB algorithm.
+    // update         - is not supported now. (!)
+    // OUTPUT
+    // RESULT
+    // Error state.
+    */
+    virtual bool train( const CvMat* trainData, int tflag,
+             const CvMat* responses, const CvMat* varIdx=0,
+             const CvMat* sampleIdx=0, const CvMat* varType=0,
+             const CvMat* missingDataMask=0,
+             CvGBTreesParams params=CvGBTreesParams(),
+             bool update=false );
+
+
+    /*
+    // Gradient tree boosting model training
+    //
+    // API
+    // virtual bool train( CvMLData* data,
+             CvGBTreesParams params=CvGBTreesParams(),
+             bool update=false ) {return false;}
+
+    // INPUT
+    // data          - training set.
+    // params        - parameters of GTB algorithm.
+    // update        - is not supported now. (!)
+    // OUTPUT
+    // RESULT
+    // Error state.
+    */
+    virtual bool train( CvMLData* data,
+             CvGBTreesParams params=CvGBTreesParams(),
+             bool update=false );
+
+
+    /*
+    // Response value prediction
+    //
+    // API
+    // virtual float predict_serial( const CvMat* sample, const CvMat* missing=0,
+             CvMat* weak_responses=0, CvSlice slice = CV_WHOLE_SEQ,
+             int k=-1 ) const;
+
+    // INPUT
+    // sample         - input sample of the same type as in the training set.
+    // missing        - missing values mask. missing=0 if there are no
+    //                   missing values in sample vector.
+    // weak_responses  - predictions of all of the trees.
+    //                   not implemented (!)
+    // slice           - part of the ensemble used for prediction.
+    //                   slice = CV_WHOLE_SEQ when all trees are used.
+    // k               - index of the ensemble used.
+    //                   k is in {-1,0,1,..,<count of output classes>-1}.
+    //                   in the case of a classification problem
+    //                   one ensemble is built per output class.
+    //                   If k = -1 the ordinary (combined) prediction is the result,
+    //                   otherwise the function gives the prediction of the
+    //                   k-th ensemble only.
+    // OUTPUT
+    // RESULT
+    // Predicted value.
+    */
+    virtual float predict_serial( const CvMat* sample, const CvMat* missing=0,
+            CvMat* weakResponses=0, CvSlice slice = CV_WHOLE_SEQ,
+            int k=-1 ) const;
+
+    /*
+    // Response value prediction.
+    // Parallel version (in the case of TBB existence)
+    //
+    // API
+    // virtual float predict( const CvMat* sample, const CvMat* missing=0,
+             CvMat* weak_responses=0, CvSlice slice = CV_WHOLE_SEQ,
+             int k=-1 ) const;
+
+    // INPUT
+    // sample         - input sample of the same type as in the training set.
+    // missing        - missing values mask. missing=0 if there are no
+    //                   missing values in sample vector.
+    // weak_responses  - predictions of all of the trees.
+    //                   not implemented (!)
+    // slice           - part of the ensemble used for prediction.
+    //                   slice = CV_WHOLE_SEQ when all trees are used.
+    // k               - index of the ensemble used.
+    //                   k is in {-1,0,1,..,<count of output classes>-1}.
+    //                   in the case of a classification problem
+    //                   one ensemble is built per output class.
+    //                   If k = -1 the ordinary (combined) prediction is the result,
+    //                   otherwise the function gives the prediction of the
+    //                   k-th ensemble only.
+    // OUTPUT
+    // RESULT
+    // Predicted value.
+    */
+    virtual float predict( const CvMat* sample, const CvMat* missing=0,
+            CvMat* weakResponses=0, CvSlice slice = CV_WHOLE_SEQ,
+            int k=-1 ) const;
+
+    /*
+    // Deletes all the data.
+    //
+    // API
+    // virtual void clear();
+
+    // INPUT
+    // OUTPUT
+    // delete data, weak, orig_response, sum_response,
+    //        weak_eval, subsample_train, subsample_test,
+    //        sample_idx, missing, class_labels
+    // delta = 0.0
+    // RESULT
+    */
+    CV_WRAP virtual void clear();
+
+    /*
+    // Compute error on the train/test set.
+    //
+    // API
+    // virtual float calc_error( CvMLData* _data, int type,
+    //        std::vector<float> *resp = 0 );
+    //
+    // INPUT
+    // data  - dataset
+    // type  - defines which error is to compute: train (CV_TRAIN_ERROR) or
+    //         test (CV_TEST_ERROR).
+    // OUTPUT
+    // resp  - vector of predictions
+    // RESULT
+    // Error value.
+    */
+    virtual float calc_error( CvMLData* _data, int type,
+            std::vector<float> *resp = 0 );
+
+    /*
+    //
+    // Write parameters of the gtb model and data. Write learned model.
+    //
+    // API
+    // virtual void write( CvFileStorage* fs, const char* name ) const;
+    //
+    // INPUT
+    // fs     - file storage to write the model to.
+    // name   - model name.
+    // OUTPUT
+    // RESULT
+    */
+    virtual void write( CvFileStorage* fs, const char* name ) const;
+
+
+    /*
+    //
+    // Read parameters of the gtb model and data. Read learned model.
+    //
+    // API
+    // virtual void read( CvFileStorage* fs, CvFileNode* node );
+    //
+    // INPUT
+    // fs     - file storage to read parameters from.
+    // node   - file node.
+    // OUTPUT
+    // RESULT
+    */
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+
+
+    // new-style C++ interface
+    CV_WRAP CvGBTrees( const cv::Mat& trainData, int tflag,
+              const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+              const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+              const cv::Mat& missingDataMask=cv::Mat(),
+              CvGBTreesParams params=CvGBTreesParams() );
+
+    CV_WRAP virtual bool train( const cv::Mat& trainData, int tflag,
+                       const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat(),
+                       CvGBTreesParams params=CvGBTreesParams(),
+                       bool update=false );
+
+    CV_WRAP virtual float predict( const cv::Mat& sample, const cv::Mat& missing=cv::Mat(),
+                           const cv::Range& slice = cv::Range::all(),
+                           int k=-1 ) const;
+
+protected:
+
+    /*
+    // Compute the gradient vector components.
+    //
+    // API
+    // virtual void find_gradient( const int k = 0);
+
+    // INPUT
+    // k        - used in the classification case to select the current
+    //            tree ensemble.
+    // OUTPUT
+    // changes components of data->responses
+    // which correspond to samples used for training
+    // on the current step.
+    // RESULT
+    */
+    virtual void find_gradient( const int k = 0);
+
+
+    /*
+    //
+    // Change values in tree leaves according to the used loss function.
+    //
+    // API
+    // virtual void change_values(CvDTree* tree, const int k = 0);
+    //
+    // INPUT
+    // tree      - decision tree to change.
+    // k         - used in the classification case to select the current
+    //             tree ensemble.
+    // OUTPUT
+    // changes 'value' fields of the trees' leaves.
+    // changes sum_response_tmp.
+    // RESULT
+    */
+    virtual void change_values(CvDTree* tree, const int k = 0);
+
+
+    /*
+    //
+    // Find optimal constant prediction value according to the used loss
+    // function.
+    // The goal is to find a constant which gives the minimal summary loss
+    // on the _Idx samples.
+    //
+    // API
+    // virtual float find_optimal_value( const CvMat* _Idx );
+    //
+    // INPUT
+    // _Idx        - indices of the samples from the training set.
+    // OUTPUT
+    // RESULT
+    // optimal constant value.
+    */
+    virtual float find_optimal_value( const CvMat* _Idx );
+
+
+    /*
+    //
+    // Randomly split the whole training set in two parts according
+    // to params.portion.
+    //
+    // API
+    // virtual void do_subsample();
+    //
+    // INPUT
+    // OUTPUT
+    // subsample_train - indices of samples used for training
+    // subsample_test  - indices of samples used for test
+    // RESULT
+    */
+    virtual void do_subsample();
+
+
+    /*
+    //
+    // Internal recursive function giving an array of the subtree leaves.
+    //
+    // API
+    // void leaves_get( CvDTreeNode** leaves, int& count, CvDTreeNode* node );
+    //
+    // INPUT
+    // node         - current node of the subtree being traversed.
+    // OUTPUT
+    // count        - count of leaves in the subtree.
+    // leaves       - array of pointers to leaves.
+    // RESULT
+    */
+    void leaves_get( CvDTreeNode** leaves, int& count, CvDTreeNode* node );
+
+
+    /*
+    //
+    // Get leaves of the tree.
+    //
+    // API
+    // CvDTreeNode** GetLeaves( const CvDTree* dtree, int& len );
+    //
+    // INPUT
+    // dtree            - decision tree.
+    // OUTPUT
+    // len              - count of the leaves.
+    // RESULT
+    // CvDTreeNode**    - array of pointers to leaves.
+    */
+    CvDTreeNode** GetLeaves( const CvDTree* dtree, int& len );
+
+
+    /*
+    //
+    // Tells whether the problem is regression or classification.
+    //
+    // API
+    // bool problem_type();
+    //
+    // INPUT
+    // OUTPUT
+    // RESULT
+    // false if it is a classification problem,
+    // true - if regression.
+    */
+    virtual bool problem_type() const;
+
+
+    /*
+    //
+    // Write parameters of the gtb model.
+    //
+    // API
+    // virtual void write_params( CvFileStorage* fs ) const;
+    //
+    // INPUT
+    // fs           - file storage to write parameters to.
+    // OUTPUT
+    // RESULT
+    */
+    virtual void write_params( CvFileStorage* fs ) const;
+
+
+    /*
+    //
+    // Read parameters of the gtb model and data.
+    //
+    // API
+    // virtual void read_params( CvFileStorage* fs );
+    //
+    // INPUT
+    // fs           - file storage to read parameters from.
+    // OUTPUT
+    // params       - parameters of the gtb model.
+    // data         - contains information about the structure
+    //                of the data set (count of variables,
+    //                their types, etc.).
+    // class_labels - output class labels map.
+    // RESULT
+    */
+    virtual void read_params( CvFileStorage* fs, CvFileNode* fnode );
+    int get_len(const CvMat* mat) const;
+
+
+    CvDTreeTrainData* data;
+    CvGBTreesParams params;
+
+    CvSeq** weak;
+    CvMat* orig_response;
+    CvMat* sum_response;
+    CvMat* sum_response_tmp;
+    CvMat* sample_idx;
+    CvMat* subsample_train;
+    CvMat* subsample_test;
+    CvMat* missing;
+    CvMat* class_labels;
+
+    cv::RNG* rng;
+
+    int class_count;
+    float delta;
+    float base_value;
+
+};
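+
+// A minimal train/predict sketch for the new-style (cv::Mat) interface above.
+// Illustrative only: the matrix sizes and the regression setup are assumptions.
+//
+//     cv::Mat samples(100, 5, CV_32F), responses(100, 1, CV_32F);
+//     // ... fill samples and responses ...
+//     CvGBTrees gbt;
+//     gbt.train( samples, CV_ROW_SAMPLE, responses );   // default CvGBTreesParams()
+//     float y = gbt.predict( samples.row(0) );          // ordinary prediction (k = -1)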
+
+
+
+/****************************************************************************************\
+*                              Artificial Neural Networks (ANN)                          *
+\****************************************************************************************/
+
+/////////////////////////////////// Multi-Layer Perceptrons //////////////////////////////
+
+struct CvANN_MLP_TrainParams
+{
+    CvANN_MLP_TrainParams();
+    CvANN_MLP_TrainParams( CvTermCriteria term_crit, int train_method,
+                           double param1, double param2=0 );
+    ~CvANN_MLP_TrainParams();
+
+    enum { BACKPROP=0, RPROP=1 };
+
+    CV_PROP_RW CvTermCriteria term_crit;
+    CV_PROP_RW int train_method;
+
+    // backpropagation parameters
+    CV_PROP_RW double bp_dw_scale, bp_moment_scale;
+
+    // rprop parameters
+    CV_PROP_RW double rp_dw0, rp_dw_plus, rp_dw_minus, rp_dw_min, rp_dw_max;
+};
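+
+// A minimal sketch constructing RPROP training parameters with the constructor
+// above. Illustrative only: the iteration limit and the initial step are
+// arbitrary assumptions.
+//
+//     CvANN_MLP_TrainParams tp( cvTermCriteria( CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 300, 0.01 ),
+//                               CvANN_MLP_TrainParams::RPROP, /*rp_dw0=*/0.1 );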
+
+
+class CvANN_MLP : public CvStatModel
+{
+public:
+    CV_WRAP CvANN_MLP();
+    CvANN_MLP( const CvMat* layerSizes,
+               int activateFunc=CvANN_MLP::SIGMOID_SYM,
+               double fparam1=0, double fparam2=0 );
+
+    virtual ~CvANN_MLP();
+
+    virtual void create( const CvMat* layerSizes,
+                         int activateFunc=CvANN_MLP::SIGMOID_SYM,
+                         double fparam1=0, double fparam2=0 );
+
+    virtual int train( const CvMat* inputs, const CvMat* outputs,
+                       const CvMat* sampleWeights, const CvMat* sampleIdx=0,
+                       CvANN_MLP_TrainParams params = CvANN_MLP_TrainParams(),
+                       int flags=0 );
+    virtual float predict( const CvMat* inputs, CV_OUT CvMat* outputs ) const;
+
+    CV_WRAP CvANN_MLP( const cv::Mat& layerSizes,
+              int activateFunc=CvANN_MLP::SIGMOID_SYM,
+              double fparam1=0, double fparam2=0 );
+
+    CV_WRAP virtual void create( const cv::Mat& layerSizes,
+                        int activateFunc=CvANN_MLP::SIGMOID_SYM,
+                        double fparam1=0, double fparam2=0 );
+
+    CV_WRAP virtual int train( const cv::Mat& inputs, const cv::Mat& outputs,
+                      const cv::Mat& sampleWeights, const cv::Mat& sampleIdx=cv::Mat(),
+                      CvANN_MLP_TrainParams params = CvANN_MLP_TrainParams(),
+                      int flags=0 );
+
+    CV_WRAP virtual float predict( const cv::Mat& inputs, CV_OUT cv::Mat& outputs ) const;
+
+    CV_WRAP virtual void clear();
+
+    // possible activation functions
+    enum { IDENTITY = 0, SIGMOID_SYM = 1, GAUSSIAN = 2 };
+
+    // available training flags
+    enum { UPDATE_WEIGHTS = 1, NO_INPUT_SCALE = 2, NO_OUTPUT_SCALE = 4 };
+
+    virtual void read( CvFileStorage* fs, CvFileNode* node );
+    virtual void write( CvFileStorage* storage, const char* name ) const;
+
+    int get_layer_count() { return layer_sizes ? layer_sizes->cols : 0; }
+    const CvMat* get_layer_sizes() { return layer_sizes; }
+    double* get_weights(int layer)
+    {
+        return layer_sizes && weights &&
+            (unsigned)layer <= (unsigned)layer_sizes->cols ? weights[layer] : 0;
+    }
+
+    virtual void calc_activ_func_deriv( CvMat* xf, CvMat* deriv, const double* bias ) const;
+
+protected:
+
+    virtual bool prepare_to_train( const CvMat* _inputs, const CvMat* _outputs,
+            const CvMat* _sample_weights, const CvMat* sampleIdx,
+            CvVectors* _ivecs, CvVectors* _ovecs, double** _sw, int _flags );
+
+    // sequential random backpropagation
+    virtual int train_backprop( CvVectors _ivecs, CvVectors _ovecs, const double* _sw );
+
+    // RPROP algorithm
+    virtual int train_rprop( CvVectors _ivecs, CvVectors _ovecs, const double* _sw );
+
+    virtual void calc_activ_func( CvMat* xf, const double* bias ) const;
+    virtual void set_activ_func( int _activ_func=SIGMOID_SYM,
+                                 double _f_param1=0, double _f_param2=0 );
+    virtual void init_weights();
+    virtual void scale_input( const CvMat* _src, CvMat* _dst ) const;
+    virtual void scale_output( const CvMat* _src, CvMat* _dst ) const;
+    virtual void calc_input_scale( const CvVectors* vecs, int flags );
+    virtual void calc_output_scale( const CvVectors* vecs, int flags );
+
+    virtual void write_params( CvFileStorage* fs ) const;
+    virtual void read_params( CvFileStorage* fs, CvFileNode* node );
+
+    CvMat* layer_sizes;
+    CvMat* wbuf;
+    CvMat* sample_weights;
+    double** weights;
+    double f_param1, f_param2;
+    double min_val, max_val, min_val1, max_val1;
+    int activ_func;
+    int max_count, max_buf_sz;
+    CvANN_MLP_TrainParams params;
+    cv::RNG* rng;
+};
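+
+// A minimal MLP sketch based on the cv::Mat interface above. Illustrative only:
+// the layer sizes and matrix shapes are assumptions, and inputs/outputs are
+// presumed to be CV_32F matrices filled elsewhere.
+//
+//     cv::Mat layer_sizes = (cv::Mat_<int>(1, 3) << 5, 10, 1);
+//     CvANN_MLP mlp( layer_sizes, CvANN_MLP::SIGMOID_SYM );
+//     mlp.train( inputs, outputs, cv::Mat(), cv::Mat(), CvANN_MLP_TrainParams() );
+//     cv::Mat predictions;
+//     mlp.predict( test_inputs, predictions );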
+
+/****************************************************************************************\
+*                           Auxiliary function declarations                              *
+\****************************************************************************************/
+
+/* Generates <sample> from a multivariate normal distribution, where <mean> is the
+   mean row vector and <cov> is the symmetric covariance matrix */
+CVAPI(void) cvRandMVNormal( CvMat* mean, CvMat* cov, CvMat* sample,
+                           CvRNG* rng CV_DEFAULT(0) );
+
+/* Generates a sample from a Gaussian mixture distribution */
+CVAPI(void) cvRandGaussMixture( CvMat* means[],
+                               CvMat* covs[],
+                               float weights[],
+                               int clsnum,
+                               CvMat* sample,
+                               CvMat* sampClasses CV_DEFAULT(0) );
+
+#define CV_TS_CONCENTRIC_SPHERES 0
+
+/* creates test set */
+CVAPI(void) cvCreateTestSet( int type, CvMat** samples,
+                 int num_samples,
+                 int num_features,
+                 CvMat** responses,
+                 int num_classes, ... );
+
+/****************************************************************************************\
+*                                      Data                                             *
+\****************************************************************************************/
+
+#define CV_COUNT     0
+#define CV_PORTION   1
+
+struct CvTrainTestSplit
+{
+    CvTrainTestSplit();
+    CvTrainTestSplit( int train_sample_count, bool mix = true);
+    CvTrainTestSplit( float train_sample_portion, bool mix = true);
+
+    union
+    {
+        int count;
+        float portion;
+    } train_sample_part;
+    int train_sample_part_mode;
+
+    bool mix;
+};
+
+class CvMLData
+{
+public:
+    CvMLData();
+    virtual ~CvMLData();
+
+    // returns:
+    // 0 - OK
+    // -1 - file cannot be opened or is not valid
+    int read_csv( const char* filename );
+
+    const CvMat* get_values() const;
+    const CvMat* get_responses();
+    const CvMat* get_missing() const;
+
+    void set_header_lines_number( int n );
+    int get_header_lines_number() const;
+
+    void set_response_idx( int idx ); // the old response becomes a predictor, new response_idx = idx
+                                      // if idx < 0 there will be no response
+    int get_response_idx() const;
+
+    void set_train_test_split( const CvTrainTestSplit * spl );
+    const CvMat* get_train_sample_idx() const;
+    const CvMat* get_test_sample_idx() const;
+    void mix_train_and_test_idx();
+
+    const CvMat* get_var_idx();
+    void chahge_var_idx( int vi, bool state ); // misspelled (kept for backward compatibility),
+                                               // use change_var_idx
+    void change_var_idx( int vi, bool state ); // state == true to set vi-variable as predictor
+
+    const CvMat* get_var_types();
+    int get_var_type( int var_idx ) const;
+    // the following 2 methods allow changing a variable's type;
+    // use them to assign the CV_VAR_CATEGORICAL type to a categorical variable
+    // with numerical labels; in the other cases var types are correctly determined automatically
+    void set_var_types( const char* str );  // str examples:
+                                            // "ord[0-17],cat[18]", "ord[0,2,4,10-12], cat[1,3,5-9,13,14]",
+                                            // "cat", "ord" (all vars are categorical/ordered)
+    void change_var_type( int var_idx, int type); // type in { CV_VAR_ORDERED, CV_VAR_CATEGORICAL }
+
+    void set_delimiter( char ch );
+    char get_delimiter() const;
+
+    void set_miss_ch( char ch );
+    char get_miss_ch() const;
+
+    const std::map<cv::String, int>& get_class_labels_map() const;
+
+protected:
+    virtual void clear();
+
+    void str_to_flt_elem( const char* token, float& flt_elem, int& type);
+    void free_train_test_idx();
+
+    char delimiter;
+    char miss_ch;
+    //char flt_separator;
+
+    CvMat* values;
+    CvMat* missing;
+    CvMat* var_types;
+    CvMat* var_idx_mask;
+
+    CvMat* response_out; // header
+    CvMat* var_idx_out; // mat
+    CvMat* var_types_out; // mat
+
+    int header_lines_number;
+
+    int response_idx;
+
+    int train_sample_count;
+    bool mix;
+
+    int total_class_count;
+    std::map<cv::String, int> class_map;
+
+    CvMat* train_sample_idx;
+    CvMat* test_sample_idx;
+    int* sample_idx; // data of train_sample_idx and test_sample_idx
+
+    cv::RNG* rng;
+};
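+
+// A minimal CvMLData sketch based on the interface above. Illustrative only:
+// the file name and the response column index are assumptions.
+//
+//     CvMLData mldata;
+//     if( mldata.read_csv( "dataset.csv" ) == 0 )
+//     {
+//         mldata.set_response_idx( 0 );            // use the first column as the response
+//         CvTrainTestSplit split( 0.8f );          // 80% of the samples go to training
+//         mldata.set_train_test_split( &split );
+//         const CvMat* values = mldata.get_values();
+//         const CvMat* train_idx = mldata.get_train_sample_idx();
+//     }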
+
+
+namespace cv
+{
+
+typedef CvStatModel StatModel;
+typedef CvParamGrid ParamGrid;
+typedef CvNormalBayesClassifier NormalBayesClassifier;
+typedef CvKNearest KNearest;
+typedef CvSVMParams SVMParams;
+typedef CvSVMKernel SVMKernel;
+typedef CvSVMSolver SVMSolver;
+typedef CvSVM SVM;
+typedef CvDTreeParams DTreeParams;
+typedef CvMLData TrainData;
+typedef CvDTree DecisionTree;
+typedef CvForestTree ForestTree;
+typedef CvRTParams RandomTreeParams;
+typedef CvRTrees RandomTrees;
+typedef CvERTreeTrainData ERTreeTRainData;
+typedef CvForestERTree ERTree;
+typedef CvERTrees ERTrees;
+typedef CvBoostParams BoostParams;
+typedef CvBoostTree BoostTree;
+typedef CvBoost Boost;
+typedef CvANN_MLP_TrainParams ANN_MLP_TrainParams;
+typedef CvANN_MLP NeuralNet_MLP;
+typedef CvGBTreesParams GradientBoostingTreeParams;
+typedef CvGBTrees GradientBoostingTrees;
+
+template<> void DefaultDeleter<CvDTreeSplit>::operator ()(CvDTreeSplit* obj) const;
+
+bool initModule_ml(void);
+}
+
+#endif // __cplusplus
+#endif // __OPENCV_ML_HPP__
+
+/* End of file. */
diff --git a/apps/traincascade/old_ml_boost.cpp b/apps/traincascade/old_ml_boost.cpp
new file mode 100644 (file)
index 0000000..be4cd81
--- /dev/null
@@ -0,0 +1,2162 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#include "old_ml_precomp.hpp"
+
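+// Note: log_ratio(p) below returns the log-odds log(p/(1-p)), with p clipped
+// to [eps, 1-eps] to avoid infinities; CvBoostTree::calc_node_value uses
+// 0.5*log_ratio(p) as the leaf value in the Real AdaBoost case.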
+static inline double
+log_ratio( double val )
+{
+    const double eps = 1e-5;
+
+    val = MAX( val, eps );
+    val = MIN( val, 1. - eps );
+    return log( val/(1. - val) );
+}
+
+
+CvBoostParams::CvBoostParams()
+{
+    boost_type = CvBoost::REAL;
+    weak_count = 100;
+    weight_trim_rate = 0.95;
+    cv_folds = 0;
+    max_depth = 1;
+}
+
+
+CvBoostParams::CvBoostParams( int _boost_type, int _weak_count,
+                                        double _weight_trim_rate, int _max_depth,
+                                        bool _use_surrogates, const float* _priors )
+{
+    boost_type = _boost_type;
+    weak_count = _weak_count;
+    weight_trim_rate = _weight_trim_rate;
+    split_criteria = CvBoost::DEFAULT;
+    cv_folds = 0;
+    max_depth = _max_depth;
+    use_surrogates = _use_surrogates;
+    priors = _priors;
+}
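+
+// A minimal usage sketch for the full constructor above. Illustrative only:
+// the chosen boosting type and parameter values are arbitrary assumptions.
+//
+//     CvBoostParams bp( CvBoost::GENTLE, /*weak_count=*/100,
+//                       /*weight_trim_rate=*/0.95, /*max_depth=*/2,
+//                       /*use_surrogates=*/false, /*priors=*/0 );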
+
+
+
+///////////////////////////////// CvBoostTree ///////////////////////////////////
+
+CvBoostTree::CvBoostTree()
+{
+    ensemble = 0;
+}
+
+
+CvBoostTree::~CvBoostTree()
+{
+    clear();
+}
+
+
+void
+CvBoostTree::clear()
+{
+    CvDTree::clear();
+    ensemble = 0;
+}
+
+
+bool
+CvBoostTree::train( CvDTreeTrainData* _train_data,
+                    const CvMat* _subsample_idx, CvBoost* _ensemble )
+{
+    clear();
+    ensemble = _ensemble;
+    data = _train_data;
+    data->shared = true;
+    return do_train( _subsample_idx );
+}
+
+
+bool
+CvBoostTree::train( const CvMat*, int, const CvMat*, const CvMat*,
+                    const CvMat*, const CvMat*, const CvMat*, CvDTreeParams )
+{
+    assert(0);
+    return false;
+}
+
+
+bool
+CvBoostTree::train( CvDTreeTrainData*, const CvMat* )
+{
+    assert(0);
+    return false;
+}
+
+
+void
+CvBoostTree::scale( double _scale )
+{
+    CvDTreeNode* node = root;
+
+    // traverse the tree and scale all the node values
+    for(;;)
+    {
+        CvDTreeNode* parent;
+        for(;;)
+        {
+            node->value *= _scale;
+            if( !node->left )
+                break;
+            node = node->left;
+        }
+
+        for( parent = node->parent; parent && parent->right == node;
+            node = parent, parent = parent->parent )
+            ;
+
+        if( !parent )
+            break;
+
+        node = parent->right;
+    }
+}
+
+
+void
+CvBoostTree::try_split_node( CvDTreeNode* node )
+{
+    CvDTree::try_split_node( node );
+
+    if( !node->left )
+    {
+        // if the node has not been split,
+        // store the responses for the corresponding training samples
+        double* weak_eval = ensemble->get_weak_response()->data.db;
+        cv::AutoBuffer<int> inn_buf(node->sample_count);
+        const int* labels = data->get_cv_labels( node, (int*)inn_buf );
+        int i, count = node->sample_count;
+        double value = node->value;
+
+        for( i = 0; i < count; i++ )
+            weak_eval[labels[i]] = value;
+    }
+}
+
+
+double
+CvBoostTree::calc_node_dir( CvDTreeNode* node )
+{
+    char* dir = (char*)data->direction->data.ptr;
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    int i, n = node->sample_count, vi = node->split->var_idx;
+    double L, R;
+
+    assert( !node->split->inversed );
+
+    if( data->get_var_type(vi) >= 0 ) // split on categorical var
+    {
+        cv::AutoBuffer<int> inn_buf(n);
+        const int* cat_labels = data->get_cat_var_data( node, vi, (int*)inn_buf );
+        const int* subset = node->split->subset;
+        double sum = 0, sum_abs = 0;
+
+        for( i = 0; i < n; i++ )
+        {
+            int idx = ((cat_labels[i] == 65535) && data->is_buf_16u) ? -1 : cat_labels[i];
+            double w = weights[i];
+            int d = idx >= 0 ? CV_DTREE_CAT_DIR(idx,subset) : 0;
+            sum += d*w; sum_abs += (d & 1)*w;
+            dir[i] = (char)d;
+        }
+
+        R = (sum_abs + sum) * 0.5;
+        L = (sum_abs - sum) * 0.5;
+    }
+    else // split on ordered var
+    {
+        cv::AutoBuffer<uchar> inn_buf(2*n*sizeof(int)+n*sizeof(float));
+        float* values_buf = (float*)(uchar*)inn_buf;
+        int* sorted_indices_buf = (int*)(values_buf + n);
+        int* sample_indices_buf = sorted_indices_buf + n;
+        const float* values = 0;
+        const int* sorted_indices = 0;
+        data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values, &sorted_indices, sample_indices_buf );
+        int split_point = node->split->ord.split_point;
+        int n1 = node->get_num_valid(vi);
+
+        assert( 0 <= split_point && split_point < n1-1 );
+        L = R = 0;
+
+        for( i = 0; i <= split_point; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = weights[idx];
+            dir[idx] = (char)-1;
+            L += w;
+        }
+
+        for( ; i < n1; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = weights[idx];
+            dir[idx] = (char)1;
+            R += w;
+        }
+
+        for( ; i < n; i++ )
+            dir[sorted_indices[i]] = (char)0;
+    }
+
+    node->maxlr = MAX( L, R );
+    return node->split->quality/(L + R);
+}
+
+
+CvDTreeSplit*
+CvBoostTree::find_split_ord_class( CvDTreeNode* node, int vi, float init_quality,
+                                    CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    int n = node->sample_count;
+    int n1 = node->get_num_valid(vi);
+
+    cv::AutoBuffer<uchar> inn_buf;
+    if( !_ext_buf )
+        inn_buf.allocate(n*(3*sizeof(int)+sizeof(float)));
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+    float* values_buf = (float*)ext_buf;
+    int* sorted_indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = sorted_indices_buf + n;
+    const float* values = 0;
+    const int* sorted_indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values, &sorted_indices, sample_indices_buf );
+    int* responses_buf = sorted_indices_buf + n;
+    const int* responses = data->get_class_labels( node, responses_buf );
+    const double* rcw0 = weights + n;
+    double lcw[2] = {0,0}, rcw[2];
+    int i, best_i = -1;
+    double best_val = init_quality;
+    int boost_type = ensemble->get_params().boost_type;
+    int split_criteria = ensemble->get_params().split_criteria;
+
+    rcw[0] = rcw0[0]; rcw[1] = rcw0[1];
+    for( i = n1; i < n; i++ )
+    {
+        int idx = sorted_indices[i];
+        double w = weights[idx];
+        rcw[responses[idx]] -= w;
+    }
+
+    if( split_criteria != CvBoost::GINI && split_criteria != CvBoost::MISCLASS )
+        split_criteria = boost_type == CvBoost::DISCRETE ? CvBoost::MISCLASS : CvBoost::GINI;
+
+    if( split_criteria == CvBoost::GINI )
+    {
+        double L = 0, R = rcw[0] + rcw[1];
+        double lsum2 = 0, rsum2 = rcw[0]*rcw[0] + rcw[1]*rcw[1];
+
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = weights[idx], w2 = w*w;
+            double lv, rv;
+            idx = responses[idx];
+            L += w; R -= w;
+            lv = lcw[idx]; rv = rcw[idx];
+            lsum2 += 2*lv*w + w2;
+            rsum2 -= 2*rv*w - w2;
+            lcw[idx] = lv + w; rcw[idx] = rv - w;
+
+            if( values[i] + epsilon < values[i+1] )
+            {
+                double val = (lsum2*R + rsum2*L)/(L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_i = i;
+                }
+            }
+        }
+    }
+    else
+    {
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = weights[idx];
+            idx = responses[idx];
+            lcw[idx] += w;
+            rcw[idx] -= w;
+
+            if( values[i] + epsilon < values[i+1] )
+            {
+                double val = lcw[0] + rcw[1], val2 = lcw[1] + rcw[0];
+                val = MAX(val, val2);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_i = i;
+                }
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_i >= 0 )
+    {
+        split = _split ? _split : data->new_split_ord( 0, 0.0f, 0, 0, 0.0f );
+        split->var_idx = vi;
+        split->ord.c = (values[best_i] + values[best_i+1])*0.5f;
+        split->ord.split_point = best_i;
+        split->inversed = 0;
+        split->quality = (float)best_val;
+    }
+    return split;
+}
+
+template<typename T>
+class LessThanPtr
+{
+public:
+    bool operator()(T* a, T* b) const { return *a < *b; }
+};
+
+CvDTreeSplit*
+CvBoostTree::find_split_cat_class( CvDTreeNode* node, int vi, float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    int ci = data->get_var_type(vi);
+    int n = node->sample_count;
+    int mi = data->cat_count->data.i[ci];
+
+    int base_size = (2*mi+3)*sizeof(double) + mi*sizeof(double*);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf)
+        inn_buf.allocate( base_size + 2*n*sizeof(int) );
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+
+    int* cat_labels_buf = (int*)ext_buf;
+    const int* cat_labels = data->get_cat_var_data(node, vi, cat_labels_buf);
+    int* responses_buf = cat_labels_buf + n;
+    const int* responses = data->get_class_labels(node, responses_buf);
+    double lcw[2]={0,0}, rcw[2]={0,0};
+
+    double* cjk = (double*)cv::alignPtr(base_buf,sizeof(double))+2;
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    double** dbl_ptr = (double**)(cjk + 2*mi);
+    int i, j, k, idx;
+    double L = 0, R;
+    double best_val = init_quality;
+    int best_subset = -1, subset_i;
+    int boost_type = ensemble->get_params().boost_type;
+    int split_criteria = ensemble->get_params().split_criteria;
+
+    // init array of counters:
+    // c_{jk} - total weight of samples that have the vi-th input variable = j and response = k.
+    for( j = -1; j < mi; j++ )
+        cjk[j*2] = cjk[j*2+1] = 0;
+
+    for( i = 0; i < n; i++ )
+    {
+        double w = weights[i];
+        j = ((cat_labels[i] == 65535) && data->is_buf_16u) ? -1 : cat_labels[i];
+        k = responses[i];
+        cjk[j*2 + k] += w;
+    }
+
+    for( j = 0; j < mi; j++ )
+    {
+        rcw[0] += cjk[j*2];
+        rcw[1] += cjk[j*2+1];
+        dbl_ptr[j] = cjk + j*2 + 1;
+    }
+
+    R = rcw[0] + rcw[1];
+
+    if( split_criteria != CvBoost::GINI && split_criteria != CvBoost::MISCLASS )
+        split_criteria = boost_type == CvBoost::DISCRETE ? CvBoost::MISCLASS : CvBoost::GINI;
+
+    // sort rows of c_jk by increasing c_j,1
+    // (i.e. by the weight of samples in j-th category that belong to class 1)
+    std::sort(dbl_ptr, dbl_ptr + mi, LessThanPtr<double>());
+
+    for( subset_i = 0; subset_i < mi-1; subset_i++ )
+    {
+        idx = (int)(dbl_ptr[subset_i] - cjk)/2;
+        const double* crow = cjk + idx*2;
+        double w0 = crow[0], w1 = crow[1];
+        double weight = w0 + w1;
+
+        if( weight < FLT_EPSILON )
+            continue;
+
+        lcw[0] += w0; rcw[0] -= w0;
+        lcw[1] += w1; rcw[1] -= w1;
+
+        if( split_criteria == CvBoost::GINI )
+        {
+            double lsum2 = lcw[0]*lcw[0] + lcw[1]*lcw[1];
+            double rsum2 = rcw[0]*rcw[0] + rcw[1]*rcw[1];
+
+            L += weight;
+            R -= weight;
+
+            if( L > FLT_EPSILON && R > FLT_EPSILON )
+            {
+                double val = (lsum2*R + rsum2*L)/(L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_subset = subset_i;
+                }
+            }
+        }
+        else
+        {
+            double val = lcw[0] + rcw[1];
+            double val2 = lcw[1] + rcw[0];
+
+            val = MAX(val, val2);
+            if( best_val < val )
+            {
+                best_val = val;
+                best_subset = subset_i;
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_subset >= 0 )
+    {
+        split = _split ? _split : data->new_split_cat( 0, -1.0f);
+        split->var_idx = vi;
+        split->quality = (float)best_val;
+        memset( split->subset, 0, (data->max_c_count + 31)/32 * sizeof(int));
+        for( i = 0; i <= best_subset; i++ )
+        {
+            idx = (int)(dbl_ptr[i] - cjk) >> 1;
+            split->subset[idx >> 5] |= 1 << (idx & 31);
+        }
+    }
+    return split;
+}
+
+
+CvDTreeSplit*
+CvBoostTree::find_split_ord_reg( CvDTreeNode* node, int vi, float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    int n = node->sample_count;
+    int n1 = node->get_num_valid(vi);
+
+    cv::AutoBuffer<uchar> inn_buf;
+    if( !_ext_buf )
+        inn_buf.allocate(2*n*(sizeof(int)+sizeof(float)));
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+
+    float* values_buf = (float*)ext_buf;
+    int* indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = indices_buf + n;
+    const float* values = 0;
+    const int* indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, indices_buf, &values, &indices, sample_indices_buf );
+    float* responses_buf = (float*)(indices_buf + n);
+    const float* responses = data->get_ord_responses( node, responses_buf, sample_indices_buf );
+
+    int i, best_i = -1;
+    double L = 0, R = weights[n];
+    double best_val = init_quality, lsum = 0, rsum = node->value*R;
+
+    // compensate for missing values
+    for( i = n1; i < n; i++ )
+    {
+        int idx = indices[i];
+        double w = weights[idx];
+        rsum -= responses[idx]*w;
+        R -= w;
+    }
+
+    // find the optimal split
+    for( i = 0; i < n1 - 1; i++ )
+    {
+        int idx = indices[i];
+        double w = weights[idx];
+        double t = responses[idx]*w;
+        L += w; R -= w;
+        lsum += t; rsum -= t;
+
+        if( values[i] + epsilon < values[i+1] )
+        {
+            double val = (lsum*lsum*R + rsum*rsum*L)/(L*R);
+            if( best_val < val )
+            {
+                best_val = val;
+                best_i = i;
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_i >= 0 )
+    {
+        split = _split ? _split : data->new_split_ord( 0, 0.0f, 0, 0, 0.0f );
+        split->var_idx = vi;
+        split->ord.c = (values[best_i] + values[best_i+1])*0.5f;
+        split->ord.split_point = best_i;
+        split->inversed = 0;
+        split->quality = (float)best_val;
+    }
+    return split;
+}
+
+
+CvDTreeSplit*
+CvBoostTree::find_split_cat_reg( CvDTreeNode* node, int vi, float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    int ci = data->get_var_type(vi);
+    int n = node->sample_count;
+    int mi = data->cat_count->data.i[ci];
+    int base_size = (2*mi+3)*sizeof(double) + mi*sizeof(double*);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+        inn_buf.allocate(base_size + n*(2*sizeof(int) + sizeof(float)));
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+
+    int* cat_labels_buf = (int*)ext_buf;
+    const int* cat_labels = data->get_cat_var_data(node, vi, cat_labels_buf);
+    float* responses_buf = (float*)(cat_labels_buf + n);
+    int* sample_indices_buf = (int*)(responses_buf + n);
+    const float* responses = data->get_ord_responses(node, responses_buf, sample_indices_buf);
+
+    double* sum = (double*)cv::alignPtr(base_buf,sizeof(double)) + 1;
+    double* counts = sum + mi + 1;
+    double** sum_ptr = (double**)(counts + mi);
+    double L = 0, R = 0, best_val = init_quality, lsum = 0, rsum = 0;
+    int i, best_subset = -1, subset_i;
+
+    for( i = -1; i < mi; i++ )
+        sum[i] = counts[i] = 0;
+
+    // calculate sum response and weight of each category of the input var
+    for( i = 0; i < n; i++ )
+    {
+        int idx = ((cat_labels[i] == 65535) && data->is_buf_16u) ? -1 : cat_labels[i];
+        double w = weights[i];
+        double s = sum[idx] + responses[i]*w;
+        double nc = counts[idx] + w;
+        sum[idx] = s;
+        counts[idx] = nc;
+    }
+
+    // calculate average response in each category
+    for( i = 0; i < mi; i++ )
+    {
+        R += counts[i];
+        rsum += sum[i];
+        sum[i] = fabs(counts[i]) > DBL_EPSILON ? sum[i]/counts[i] : 0;
+        sum_ptr[i] = sum + i;
+    }
+
+    std::sort(sum_ptr, sum_ptr + mi, LessThanPtr<double>());
+
+    // revert to unnormalized sums
+    // (there should be very little loss in accuracy)
+    for( i = 0; i < mi; i++ )
+        sum[i] *= counts[i];
+
+    for( subset_i = 0; subset_i < mi-1; subset_i++ )
+    {
+        int idx = (int)(sum_ptr[subset_i] - sum);
+        double ni = counts[idx];
+
+        if( ni > FLT_EPSILON )
+        {
+            double s = sum[idx];
+            lsum += s; L += ni;
+            rsum -= s; R -= ni;
+
+            if( L > FLT_EPSILON && R > FLT_EPSILON )
+            {
+                double val = (lsum*lsum*R + rsum*rsum*L)/(L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_subset = subset_i;
+                }
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_subset >= 0 )
+    {
+        split = _split ? _split : data->new_split_cat( 0, -1.0f);
+        split->var_idx = vi;
+        split->quality = (float)best_val;
+        memset( split->subset, 0, (data->max_c_count + 31)/32 * sizeof(int));
+        for( i = 0; i <= best_subset; i++ )
+        {
+            int idx = (int)(sum_ptr[i] - sum);
+            split->subset[idx >> 5] |= 1 << (idx & 31);
+        }
+    }
+    return split;
+}
+
+
+CvDTreeSplit*
+CvBoostTree::find_surrogate_split_ord( CvDTreeNode* node, int vi, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+    int n = node->sample_count;
+    cv::AutoBuffer<uchar> inn_buf;
+    if( !_ext_buf )
+        inn_buf.allocate(n*(2*sizeof(int)+sizeof(float)));
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+    float* values_buf = (float*)ext_buf;
+    int* indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = indices_buf + n;
+    const float* values = 0;
+    const int* indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, indices_buf, &values, &indices, sample_indices_buf );
+
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    const char* dir = (char*)data->direction->data.ptr;
+    int n1 = node->get_num_valid(vi);
+    // LL - number of samples that both the primary and the surrogate splits send to the left
+    // LR - ... primary split sends to the left and the surrogate split sends to the right
+    // RL - ... primary split sends to the right and the surrogate split sends to the left
+    // RR - ... both send to the right
+    int i, best_i = -1, best_inversed = 0;
+    double best_val;
+    double LL = 0, RL = 0, LR, RR;
+    double worst_val = node->maxlr;
+    double sum = 0, sum_abs = 0;
+    best_val = worst_val;
+
+    for( i = 0; i < n1; i++ )
+    {
+        int idx = indices[i];
+        double w = weights[idx];
+        int d = dir[idx];
+        sum += d*w; sum_abs += (d & 1)*w;
+    }
+
+    // sum_abs = R + L; sum = R - L
+    RR = (sum_abs + sum)*0.5;
+    LR = (sum_abs - sum)*0.5;
+
+    // initially all the samples are sent to the right by the surrogate split,
+    // LR of them are sent to the left by the primary split, and RR to the right.
+    // now iteratively compute LL, LR, RL and RR for every possible surrogate split value.
+    for( i = 0; i < n1 - 1; i++ )
+    {
+        int idx = indices[i];
+        double w = weights[idx];
+        int d = dir[idx];
+
+        if( d < 0 )
+        {
+            LL += w; LR -= w;
+            if( LL + RR > best_val && values[i] + epsilon < values[i+1] )
+            {
+                best_val = LL + RR;
+                best_i = i; best_inversed = 0;
+            }
+        }
+        else if( d > 0 )
+        {
+            RL += w; RR -= w;
+            if( RL + LR > best_val && values[i] + epsilon < values[i+1] )
+            {
+                best_val = RL + LR;
+                best_i = i; best_inversed = 1;
+            }
+        }
+    }
+
+    return best_i >= 0 && best_val > node->maxlr ? data->new_split_ord( vi,
+        (values[best_i] + values[best_i+1])*0.5f, best_i,
+        best_inversed, (float)best_val ) : 0;
+}
+
+
+CvDTreeSplit*
+CvBoostTree::find_surrogate_split_cat( CvDTreeNode* node, int vi, uchar* _ext_buf )
+{
+    const char* dir = (char*)data->direction->data.ptr;
+    const double* weights = ensemble->get_subtree_weights()->data.db;
+    int n = node->sample_count;
+    int i, mi = data->cat_count->data.i[data->get_var_type(vi)];
+
+    int base_size = (2*mi+3)*sizeof(double);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+        inn_buf.allocate(base_size + n*sizeof(int));
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+    int* cat_labels_buf = (int*)ext_buf;
+    const int* cat_labels = data->get_cat_var_data(node, vi, cat_labels_buf);
+
+    // LL - number of samples that both the primary and the surrogate splits send to the left
+    // LR - ... primary split sends to the left and the surrogate split sends to the right
+    // RL - ... primary split sends to the right and the surrogate split sends to the left
+    // RR - ... both send to the right
+    CvDTreeSplit* split = data->new_split_cat( vi, 0 );
+    double best_val = 0;
+    double* lc = (double*)cv::alignPtr(cat_labels_buf + n, sizeof(double)) + 1;
+    double* rc = lc + mi + 1;
+
+    for( i = -1; i < mi; i++ )
+        lc[i] = rc[i] = 0;
+
+    // 1. for each category calculate the weight of samples
+    // sent to the left (lc) and to the right (rc) by the primary split
+    for( i = 0; i < n; i++ )
+    {
+        int idx = ((cat_labels[i] == 65535) && data->is_buf_16u) ? -1 : cat_labels[i];
+        double w = weights[i];
+        int d = dir[i];
+        double sum = lc[idx] + d*w;
+        double sum_abs = rc[idx] + (d & 1)*w;
+        lc[idx] = sum; rc[idx] = sum_abs;
+    }
+
+    for( i = 0; i < mi; i++ )
+    {
+        double sum = lc[i];
+        double sum_abs = rc[i];
+        lc[i] = (sum_abs - sum) * 0.5;
+        rc[i] = (sum_abs + sum) * 0.5;
+    }
+
+    // 2. now form the split.
+    // in each category send all the samples to the same direction as majority
+    for( i = 0; i < mi; i++ )
+    {
+        double lval = lc[i], rval = rc[i];
+        if( lval > rval )
+        {
+            split->subset[i >> 5] |= 1 << (i & 31);
+            best_val += lval;
+        }
+        else
+            best_val += rval;
+    }
+
+    split->quality = (float)best_val;
+    if( split->quality <= node->maxlr )
+        cvSetRemoveByPtr( data->split_heap, split ), split = 0;
+
+    return split;
+}
+
+
+void
+CvBoostTree::calc_node_value( CvDTreeNode* node )
+{
+    int i, n = node->sample_count;
+    const double* weights = ensemble->get_weights()->data.db;
+    cv::AutoBuffer<uchar> inn_buf(n*(sizeof(int) + ( data->is_classifier ? sizeof(int) : sizeof(int) + sizeof(float))));
+    int* labels_buf = (int*)(uchar*)inn_buf;
+    const int* labels = data->get_cv_labels(node, labels_buf);
+    double* subtree_weights = ensemble->get_subtree_weights()->data.db;
+    double rcw[2] = {0,0};
+    int boost_type = ensemble->get_params().boost_type;
+
+    if( data->is_classifier )
+    {
+        int* _responses_buf = labels_buf + n;
+        const int* _responses = data->get_class_labels(node, _responses_buf);
+        int m = data->get_num_classes();
+        int* cls_count = data->counts->data.i;
+        for( int k = 0; k < m; k++ )
+            cls_count[k] = 0;
+
+        for( i = 0; i < n; i++ )
+        {
+            int idx = labels[i];
+            double w = weights[idx];
+            int r = _responses[i];
+            rcw[r] += w;
+            cls_count[r]++;
+            subtree_weights[i] = w;
+        }
+
+        node->class_idx = rcw[1] > rcw[0];
+
+        if( boost_type == CvBoost::DISCRETE )
+        {
+            // ignore cat_map for responses, and use {-1,1},
+            // as the whole ensemble response is computed as sign(sum_i(weak_response_i))
+            node->value = node->class_idx*2 - 1;
+        }
+        else
+        {
+            double p = rcw[1]/(rcw[0] + rcw[1]);
+            assert( boost_type == CvBoost::REAL );
+
+            // store log-ratio of the probability
+            node->value = 0.5*log_ratio(p);
+        }
+    }
+    else
+    {
+        // in case of regression tree:
+        //  * node value is 1/n*sum_i(Y_i), where Y_i is i-th response,
+        //    n is the number of samples in the node.
+        //  * node risk is the sum of squared errors: sum_i((Y_i - <node_value>)^2)
+        double sum = 0, sum2 = 0, iw;
+        float* values_buf = (float*)(labels_buf + n);
+        int* sample_indices_buf = (int*)(values_buf + n);
+        const float* values = data->get_ord_responses(node, values_buf, sample_indices_buf);
+
+        for( i = 0; i < n; i++ )
+        {
+            int idx = labels[i];
+            double w = weights[idx]/*priors[values[i] > 0]*/;
+            double t = values[i];
+            rcw[0] += w;
+            subtree_weights[i] = w;
+            sum += t*w;
+            sum2 += t*t*w;
+        }
+
+        iw = 1./rcw[0];
+        node->value = sum*iw;
+        node->node_risk = sum2 - (sum*iw)*sum;
+
+        // renormalize the risk, as in try_split_node the unweighted formula
+        // sqrt(risk)/n is used, rather than sqrt(risk)/sum(weights_i)
+        node->node_risk *= n*iw*n*iw;
+    }
+
+    // store summary weights
+    subtree_weights[n] = rcw[0];
+    subtree_weights[n+1] = rcw[1];
+}
+
+
+void CvBoostTree::read( CvFileStorage* fs, CvFileNode* fnode, CvBoost* _ensemble, CvDTreeTrainData* _data )
+{
+    CvDTree::read( fs, fnode, _data );
+    ensemble = _ensemble;
+}
+
+void CvBoostTree::read( CvFileStorage*, CvFileNode* )
+{
+    assert(0);
+}
+
+void CvBoostTree::read( CvFileStorage* _fs, CvFileNode* _node,
+                        CvDTreeTrainData* _data )
+{
+    CvDTree::read( _fs, _node, _data );
+}
+
+
+/////////////////////////////////// CvBoost /////////////////////////////////////
+
+CvBoost::CvBoost()
+{
+    data = 0;
+    weak = 0;
+    default_model_name = "my_boost_tree";
+
+    active_vars = active_vars_abs = orig_response = sum_response = weak_eval =
+        subsample_mask = weights = subtree_weights = 0;
+    have_active_cat_vars = have_subsample = false;
+
+    clear();
+}
+
+
+void CvBoost::prune( CvSlice slice )
+{
+    if( weak && weak->total > 0 )
+    {
+        CvSeqReader reader;
+        int i, count = cvSliceLength( slice, weak );
+
+        cvStartReadSeq( weak, &reader );
+        cvSetSeqReaderPos( &reader, slice.start_index );
+
+        for( i = 0; i < count; i++ )
+        {
+            CvBoostTree* w;
+            CV_READ_SEQ_ELEM( w, reader );
+            delete w;
+        }
+
+        cvSeqRemoveSlice( weak, slice );
+    }
+}
+
+
+void CvBoost::clear()
+{
+    if( weak )
+    {
+        prune( CV_WHOLE_SEQ );
+        cvReleaseMemStorage( &weak->storage );
+    }
+    if( data )
+        delete data;
+    weak = 0;
+    data = 0;
+    cvReleaseMat( &active_vars );
+    cvReleaseMat( &active_vars_abs );
+    cvReleaseMat( &orig_response );
+    cvReleaseMat( &sum_response );
+    cvReleaseMat( &weak_eval );
+    cvReleaseMat( &subsample_mask );
+    cvReleaseMat( &weights );
+    cvReleaseMat( &subtree_weights );
+
+    have_subsample = false;
+}
+
+
+CvBoost::~CvBoost()
+{
+    clear();
+}
+
+
+CvBoost::CvBoost( const CvMat* _train_data, int _tflag,
+                  const CvMat* _responses, const CvMat* _var_idx,
+                  const CvMat* _sample_idx, const CvMat* _var_type,
+                  const CvMat* _missing_mask, CvBoostParams _params )
+{
+    weak = 0;
+    data = 0;
+    default_model_name = "my_boost_tree";
+
+    active_vars = active_vars_abs = orig_response = sum_response = weak_eval =
+        subsample_mask = weights = subtree_weights = 0;
+
+    train( _train_data, _tflag, _responses, _var_idx, _sample_idx,
+           _var_type, _missing_mask, _params );
+}
+
+
+bool
+CvBoost::set_params( const CvBoostParams& _params )
+{
+    bool ok = false;
+
+    CV_FUNCNAME( "CvBoost::set_params" );
+
+    __BEGIN__;
+
+    params = _params;
+    if( params.boost_type != DISCRETE && params.boost_type != REAL &&
+        params.boost_type != LOGIT && params.boost_type != GENTLE )
+        CV_ERROR( CV_StsBadArg, "Unknown/unsupported boosting type" );
+
+    params.weak_count = MAX( params.weak_count, 1 );
+    params.weight_trim_rate = MAX( params.weight_trim_rate, 0. );
+    params.weight_trim_rate = MIN( params.weight_trim_rate, 1. );
+    if( params.weight_trim_rate < FLT_EPSILON )
+        params.weight_trim_rate = 1.f;
+
+    if( params.boost_type == DISCRETE &&
+        params.split_criteria != GINI && params.split_criteria != MISCLASS )
+        params.split_criteria = MISCLASS;
+    if( params.boost_type == REAL &&
+        params.split_criteria != GINI && params.split_criteria != MISCLASS )
+        params.split_criteria = GINI;
+    if( (params.boost_type == LOGIT || params.boost_type == GENTLE) &&
+        params.split_criteria != SQERR )
+        params.split_criteria = SQERR;
+
+    ok = true;
+
+    __END__;
+
+    return ok;
+}
+
+
+bool
+CvBoost::train( const CvMat* _train_data, int _tflag,
+              const CvMat* _responses, const CvMat* _var_idx,
+              const CvMat* _sample_idx, const CvMat* _var_type,
+              const CvMat* _missing_mask,
+              CvBoostParams _params, bool _update )
+{
+    bool ok = false;
+    CvMemStorage* storage = 0;
+
+    CV_FUNCNAME( "CvBoost::train" );
+
+    __BEGIN__;
+
+    int i;
+
+    set_params( _params );
+
+    cvReleaseMat( &active_vars );
+    cvReleaseMat( &active_vars_abs );
+
+    if( !_update || !data )
+    {
+        clear();
+        data = new CvDTreeTrainData( _train_data, _tflag, _responses, _var_idx,
+            _sample_idx, _var_type, _missing_mask, _params, true, true );
+
+        if( data->get_num_classes() != 2 )
+            CV_ERROR( CV_StsNotImplemented,
+            "Boosted trees can only be used for 2-class classification." );
+        CV_CALL( storage = cvCreateMemStorage() );
+        weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
+        storage = 0;
+    }
+    else
+    {
+        data->set_data( _train_data, _tflag, _responses, _var_idx,
+            _sample_idx, _var_type, _missing_mask, _params, true, true, true );
+    }
+
+    if ( (_params.boost_type == LOGIT) || (_params.boost_type == GENTLE) )
+        data->do_responses_copy();
+
+    update_weights( 0 );
+
+    for( i = 0; i < params.weak_count; i++ )
+    {
+        CvBoostTree* tree = new CvBoostTree;
+        if( !tree->train( data, subsample_mask, this ) )
+        {
+            delete tree;
+            break;
+        }
+        //cvCheckArr( get_weak_response());
+        cvSeqPush( weak, &tree );
+        update_weights( tree );
+        trim_weights();
+        if( cvCountNonZero(subsample_mask) == 0 )
+            break;
+    }
+
+    if(weak->total > 0)
+    {
+        get_active_vars(); // recompute active_vars* maps and condensed_idx's in the splits.
+        data->is_classifier = true;
+        data->free_train_data();
+        ok = true;
+    }
+    else
+        clear();
+
+    __END__;
+
+    return ok;
+}
+
+bool CvBoost::train( CvMLData* _data,
+             CvBoostParams _params,
+             bool update )
+{
+    bool result = false;
+
+    CV_FUNCNAME( "CvBoost::train" );
+
+    __BEGIN__;
+
+    const CvMat* values = _data->get_values();
+    const CvMat* response = _data->get_responses();
+    const CvMat* missing = _data->get_missing();
+    const CvMat* var_types = _data->get_var_types();
+    const CvMat* train_sidx = _data->get_train_sample_idx();
+    const CvMat* var_idx = _data->get_var_idx();
+
+    CV_CALL( result = train( values, CV_ROW_SAMPLE, response, var_idx,
+        train_sidx, var_types, missing, _params, update ) );
+
+    __END__;
+
+    return result;
+}
+
+void CvBoost::initialize_weights(double (&p)[2])
+{
+    p[0] = 1.;
+    p[1] = 1.;
+}
+
+void
+CvBoost::update_weights( CvBoostTree* tree )
+{
+    CV_FUNCNAME( "CvBoost::update_weights" );
+
+    __BEGIN__;
+
+    int i, n = data->sample_count;
+    double sumw = 0.;
+    int step = 0;
+    float* fdata = 0;
+    int *sample_idx_buf;
+    const int* sample_idx = 0;
+    cv::AutoBuffer<uchar> inn_buf;
+    size_t _buf_size = (params.boost_type == LOGIT) || (params.boost_type == GENTLE) ? (size_t)(data->sample_count)*sizeof(int) : 0;
+    if( !tree )
+        _buf_size += n*sizeof(int);
+    else
+    {
+        if( have_subsample )
+            _buf_size += data->get_length_subbuf()*(sizeof(float)+sizeof(uchar));
+    }
+    inn_buf.allocate(_buf_size);
+    uchar* cur_buf_pos = (uchar*)inn_buf;
+
+    if ( (params.boost_type == LOGIT) || (params.boost_type == GENTLE) )
+    {
+        step = CV_IS_MAT_CONT(data->responses_copy->type) ?
+            1 : data->responses_copy->step / CV_ELEM_SIZE(data->responses_copy->type);
+        fdata = data->responses_copy->data.fl;
+        sample_idx_buf = (int*)cur_buf_pos;
+        cur_buf_pos = (uchar*)(sample_idx_buf + data->sample_count);
+        sample_idx = data->get_sample_indices( data->data_root, sample_idx_buf );
+    }
+    CvMat* dtree_data_buf = data->buf;
+    size_t length_buf_row = data->get_length_subbuf();
+    if( !tree ) // before training the first tree, initialize weights and other parameters
+    {
+        int* class_labels_buf = (int*)cur_buf_pos;
+        cur_buf_pos = (uchar*)(class_labels_buf + n);
+        const int* class_labels = data->get_class_labels(data->data_root, class_labels_buf);
+        // in case of logitboost and gentle adaboost each weak tree is a regression tree,
+        // so we need to convert class labels to floating-point values
+
+        double w0 = 1./ n;
+        double p[2] = { 1., 1. };
+        initialize_weights(p);
+
+        cvReleaseMat( &orig_response );
+        cvReleaseMat( &sum_response );
+        cvReleaseMat( &weak_eval );
+        cvReleaseMat( &subsample_mask );
+        cvReleaseMat( &weights );
+        cvReleaseMat( &subtree_weights );
+
+        CV_CALL( orig_response = cvCreateMat( 1, n, CV_32S ));
+        CV_CALL( weak_eval = cvCreateMat( 1, n, CV_64F ));
+        CV_CALL( subsample_mask = cvCreateMat( 1, n, CV_8U ));
+        CV_CALL( weights = cvCreateMat( 1, n, CV_64F ));
+        CV_CALL( subtree_weights = cvCreateMat( 1, n + 2, CV_64F ));
+
+        if( data->have_priors )
+        {
+            // compute weight scale for each class from their prior probabilities
+            int c1 = 0;
+            for( i = 0; i < n; i++ )
+                c1 += class_labels[i];
+            p[0] = data->priors->data.db[0]*(c1 < n ? 1./(n - c1) : 0.);
+            p[1] = data->priors->data.db[1]*(c1 > 0 ? 1./c1 : 0.);
+            p[0] /= p[0] + p[1];
+            p[1] = 1. - p[0];
+        }
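+        // with these scales the total initial weight of each class
+        // becomes proportional to its prior probability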
+
+        if (data->is_buf_16u)
+        {
+            unsigned short* labels = (unsigned short*)(dtree_data_buf->data.s + data->data_root->buf_idx*length_buf_row +
+                data->data_root->offset + (data->work_var_count-1)*data->sample_count);
+            for( i = 0; i < n; i++ )
+            {
+                // save original categorical responses {0,1}, convert them to {-1,1}
+                orig_response->data.i[i] = class_labels[i]*2 - 1;
+                // make all the samples active at start.
+                // later, in trim_weights(), some of them may be deactivated/reactivated again, if needed
+                subsample_mask->data.ptr[i] = (uchar)1;
+                // make all the initial weights the same.
+                weights->data.db[i] = w0*p[class_labels[i]];
+                // set the labels so that the weak tree learning procedure can find
+                // the corresponding sample weight and where to store the response.
+                labels[i] = (unsigned short)i;
+            }
+        }
+        else
+        {
+            int* labels = dtree_data_buf->data.i + data->data_root->buf_idx*length_buf_row +
+                data->data_root->offset + (data->work_var_count-1)*data->sample_count;
+
+            for( i = 0; i < n; i++ )
+            {
+                // save original categorical responses {0,1}, convert them to {-1,1}
+                orig_response->data.i[i] = class_labels[i]*2 - 1;
+                // make all the samples active at start.
+                // later, in trim_weights(), some of them may be deactivated/reactivated again, if needed
+                subsample_mask->data.ptr[i] = (uchar)1;
+                // make all the initial weights the same.
+                weights->data.db[i] = w0*p[class_labels[i]];
+                // set the labels so that the weak tree learning procedure can find
+                // the corresponding sample weight and where to store the response.
+                labels[i] = i;
+            }
+        }
+
+        if( params.boost_type == LOGIT )
+        {
+            CV_CALL( sum_response = cvCreateMat( 1, n, CV_64F ));
+
+            for( i = 0; i < n; i++ )
+            {
+                sum_response->data.db[i] = 0;
+                fdata[sample_idx[i]*step] = orig_response->data.i[i] > 0 ? 2.f : -2.f;
+            }
+
+            // in case of logitboost each weak tree is a regression tree.
+            // the target function values are recalculated for each of the trees
+            data->is_classifier = false;
+        }
+        else if( params.boost_type == GENTLE )
+        {
+            for( i = 0; i < n; i++ )
+                fdata[sample_idx[i]*step] = (float)orig_response->data.i[i];
+
+            data->is_classifier = false;
+        }
+    }
+    else
+    {
+        // at this moment, for all the samples that participated in the training of the most
+        // recent weak classifier we know the responses. For other samples we need to compute them
+        if( have_subsample )
+        {
+            float* values = (float*)cur_buf_pos;
+            cur_buf_pos = (uchar*)(values + data->get_length_subbuf());
+            uchar* missing = cur_buf_pos;
+            cur_buf_pos = missing + data->get_length_subbuf() * (size_t)CV_ELEM_SIZE(data->buf->type);
+
+            CvMat _sample, _mask;
+
+            // invert the subsample mask
+            cvXorS( subsample_mask, cvScalar(1.), subsample_mask );
+            data->get_vectors( subsample_mask, values, missing, 0 );
+
+            _sample = cvMat( 1, data->var_count, CV_32F );
+            _mask = cvMat( 1, data->var_count, CV_8U );
+
+            // run tree through all the non-processed samples
+            for( i = 0; i < n; i++ )
+                if( subsample_mask->data.ptr[i] )
+                {
+                    _sample.data.fl = values;
+                    _mask.data.ptr = missing;
+                    values += _sample.cols;
+                    missing += _mask.cols;
+                    weak_eval->data.db[i] = tree->predict( &_sample, &_mask, true )->value;
+                }
+        }
+
+        // now update weights and other parameters for each type of boosting
+        if( params.boost_type == DISCRETE )
+        {
+            // Discrete AdaBoost:
+            //   weak_eval[i] (=f(x_i)) is in {-1,1}
+            //   err = sum(w_i*(f(x_i) != y_i))/sum(w_i)
+            //   C = log((1-err)/err)
+            //   w_i *= exp(C*(f(x_i) != y_i))
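+            //   (e.g. err = 0.25 gives C = log(0.75/0.25) ~= 1.1, so the weights of
+            //    misclassified samples are multiplied by exp(C) = 3 before renormalization)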
+
+            double C, err = 0.;
+            double scale[] = { 1., 0. };
+
+            for( i = 0; i < n; i++ )
+            {
+                double w = weights->data.db[i];
+                sumw += w;
+                err += w*(weak_eval->data.db[i] != orig_response->data.i[i]);
+            }
+
+            if( sumw != 0 )
+                err /= sumw;
+            C = err = -log_ratio( err );
+            scale[1] = exp(err);
+
+            sumw = 0;
+            for( i = 0; i < n; i++ )
+            {
+                double w = weights->data.db[i]*
+                    scale[weak_eval->data.db[i] != orig_response->data.i[i]];
+                sumw += w;
+                weights->data.db[i] = w;
+            }
+
+            tree->scale( C );
+        }
+        else if( params.boost_type == REAL )
+        {
+            // Real AdaBoost:
+            //   weak_eval[i] = f(x_i) = 0.5*log(p(x_i)/(1-p(x_i))), p(x_i)=P(y=1|x_i)
+            //   w_i *= exp(-y_i*f(x_i))
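+            //   (the larger y_i*f(x_i), i.e. the more confident a correct prediction is,
+            //    the smaller the new weight; confident wrong predictions get larger weights)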
+
+            for( i = 0; i < n; i++ )
+                weak_eval->data.db[i] *= -orig_response->data.i[i];
+
+            cvExp( weak_eval, weak_eval );
+
+            for( i = 0; i < n; i++ )
+            {
+                double w = weights->data.db[i]*weak_eval->data.db[i];
+                sumw += w;
+                weights->data.db[i] = w;
+            }
+        }
+        else if( params.boost_type == LOGIT )
+        {
+            // LogitBoost:
+            //   weak_eval[i] = f(x_i) in [-z_max,z_max]
+            //   sum_response = F(x_i).
+            //   F(x_i) += 0.5*f(x_i)
+            //   p(x_i) = exp(F(x_i))/(exp(F(x_i)) + exp(-F(x_i))) = 1/(1+exp(-2*F(x_i)))
+            //   reuse weak_eval: weak_eval[i] <- p(x_i)
+            //   w_i = p(x_i)*(1 - p(x_i))
+            //   z_i = ((y_i+1)/2 - p(x_i))/(p(x_i)*(1 - p(x_i)))
+            //   store z_i to the data->data_root as the new target responses
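+            //   (e.g. while F(x_i) = 0 we have p(x_i) = 0.5, w_i = 0.25 and z_i = +-2,
+            //    which matches the +-2 initial responses set above)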
+
+            const double lb_weight_thresh = FLT_EPSILON;
+            const double lb_z_max = 10.;
+            /*float* responses_buf = data->get_resp_float_buf();
+            const float* responses = 0;
+            data->get_ord_responses(data->data_root, responses_buf, &responses);*/
+
+            /*if( weak->total == 7 )
+                putchar('*');*/
+
+            for( i = 0; i < n; i++ )
+            {
+                double s = sum_response->data.db[i] + 0.5*weak_eval->data.db[i];
+                sum_response->data.db[i] = s;
+                weak_eval->data.db[i] = -2*s;
+            }
+
+            cvExp( weak_eval, weak_eval );
+
+            for( i = 0; i < n; i++ )
+            {
+                double p = 1./(1. + weak_eval->data.db[i]);
+                double w = p*(1 - p), z;
+                w = MAX( w, lb_weight_thresh );
+                weights->data.db[i] = w;
+                sumw += w;
+                if( orig_response->data.i[i] > 0 )
+                {
+                    z = 1./p;
+                    fdata[sample_idx[i]*step] = (float)MIN(z, lb_z_max);
+                }
+                else
+                {
+                    z = 1./(1-p);
+                    fdata[sample_idx[i]*step] = (float)-MIN(z, lb_z_max);
+                }
+            }
+        }
+        else
+        {
+            // Gentle AdaBoost:
+            //   weak_eval[i] = f(x_i) in [-1,1]
+            //   w_i *= exp(-y_i*f(x_i))
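+            //   (unlike Discrete AdaBoost there is no per-tree scale factor here:
+            //    the regression output of each tree is added to the ensemble sum as is)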
+            assert( params.boost_type == GENTLE );
+
+            for( i = 0; i < n; i++ )
+                weak_eval->data.db[i] *= -orig_response->data.i[i];
+
+            cvExp( weak_eval, weak_eval );
+
+            for( i = 0; i < n; i++ )
+            {
+                double w = weights->data.db[i] * weak_eval->data.db[i];
+                weights->data.db[i] = w;
+                sumw += w;
+            }
+        }
+    }
+
+    // renormalize weights
+    if( sumw > FLT_EPSILON )
+    {
+        sumw = 1./sumw;
+        for( i = 0; i < n; ++i )
+            weights->data.db[i] *= sumw;
+    }
+
+    __END__;
+}
+
+
+void
+CvBoost::trim_weights()
+{
+    //CV_FUNCNAME( "CvBoost::trim_weights" );
+
+    __BEGIN__;
+
+    int i, count = data->sample_count, nz_count = 0;
+    double sum, threshold;
+
+    if( params.weight_trim_rate <= 0. || params.weight_trim_rate >= 1. )
+        EXIT;
+
+    // use weak_eval as temporary buffer for sorted weights
+    cvCopy( weights, weak_eval );
+
+    std::sort(weak_eval->data.db, weak_eval->data.db + count);
+
+    // as weight trimming occurs immediately after updating the weights,
+    // where they are renormalized, we assume that the weight sum = 1.
+    sum = 1. - params.weight_trim_rate;
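+    // samples whose weight falls below the threshold found here are temporarily deactivated;
+    // roughly, the samples kept active still carry ~weight_trim_rate of the total weight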
+
+    for( i = 0; i < count; i++ )
+    {
+        double w = weak_eval->data.db[i];
+        if( sum <= 0 )
+            break;
+        sum -= w;
+    }
+
+    threshold = i < count ? weak_eval->data.db[i] : DBL_MAX;
+
+    for( i = 0; i < count; i++ )
+    {
+        double w = weights->data.db[i];
+        int f = w >= threshold;
+        subsample_mask->data.ptr[i] = (uchar)f;
+        nz_count += f;
+    }
+
+    have_subsample = nz_count < count;
+
+    __END__;
+}
+
+
+const CvMat*
+CvBoost::get_active_vars( bool absolute_idx )
+{
+    CvMat* mask = 0;
+    CvMat* inv_map = 0;
+    CvMat* result = 0;
+
+    CV_FUNCNAME( "CvBoost::get_active_vars" );
+
+    __BEGIN__;
+
+    if( !weak )
+        CV_ERROR( CV_StsError, "The boosted tree ensemble has not been trained yet" );
+
+    if( !active_vars || !active_vars_abs )
+    {
+        CvSeqReader reader;
+        int i, j, nactive_vars;
+        CvBoostTree* wtree;
+        const CvDTreeNode* node;
+
+        assert(!active_vars && !active_vars_abs);
+        mask = cvCreateMat( 1, data->var_count, CV_8U );
+        inv_map = cvCreateMat( 1, data->var_count, CV_32S );
+        cvZero( mask );
+        cvSet( inv_map, cvScalar(-1) );
+
+        // first pass: compute the mask of used variables
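+        // (each tree is traversed iteratively: descend along the left children, then climb up
+        //  while the node is a right child and continue with the parent's right subtree)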
+        cvStartReadSeq( weak, &reader );
+        for( i = 0; i < weak->total; i++ )
+        {
+            CV_READ_SEQ_ELEM(wtree, reader);
+
+            node = wtree->get_root();
+            assert( node != 0 );
+            for(;;)
+            {
+                const CvDTreeNode* parent;
+                for(;;)
+                {
+                    CvDTreeSplit* split = node->split;
+                    for( ; split != 0; split = split->next )
+                        mask->data.ptr[split->var_idx] = 1;
+                    if( !node->left )
+                        break;
+                    node = node->left;
+                }
+
+                for( parent = node->parent; parent && parent->right == node;
+                    node = parent, parent = parent->parent )
+                    ;
+
+                if( !parent )
+                    break;
+
+                node = parent->right;
+            }
+        }
+
+        nactive_vars = cvCountNonZero(mask);
+
+        //if ( nactive_vars > 0 )
+        {
+            active_vars = cvCreateMat( 1, nactive_vars, CV_32S );
+            active_vars_abs = cvCreateMat( 1, nactive_vars, CV_32S );
+
+            have_active_cat_vars = false;
+
+            for( i = j = 0; i < data->var_count; i++ )
+            {
+                if( mask->data.ptr[i] )
+                {
+                    active_vars->data.i[j] = i;
+                    active_vars_abs->data.i[j] = data->var_idx ? data->var_idx->data.i[i] : i;
+                    inv_map->data.i[i] = j;
+                    if( data->var_type->data.i[i] >= 0 )
+                        have_active_cat_vars = true;
+                    j++;
+                }
+            }
+
+
+            // second pass: now compute the condensed indices
+            cvStartReadSeq( weak, &reader );
+            for( i = 0; i < weak->total; i++ )
+            {
+                CV_READ_SEQ_ELEM(wtree, reader);
+                node = wtree->get_root();
+                for(;;)
+                {
+                    const CvDTreeNode* parent;
+                    for(;;)
+                    {
+                        CvDTreeSplit* split = node->split;
+                        for( ; split != 0; split = split->next )
+                        {
+                            split->condensed_idx = inv_map->data.i[split->var_idx];
+                            assert( split->condensed_idx >= 0 );
+                        }
+
+                        if( !node->left )
+                            break;
+                        node = node->left;
+                    }
+
+                    for( parent = node->parent; parent && parent->right == node;
+                        node = parent, parent = parent->parent )
+                        ;
+
+                    if( !parent )
+                        break;
+
+                    node = parent->right;
+                }
+            }
+        }
+    }
+
+    result = absolute_idx ? active_vars_abs : active_vars;
+
+    __END__;
+
+    cvReleaseMat( &mask );
+    cvReleaseMat( &inv_map );
+
+    return result;
+}
+
+
+float
+CvBoost::predict( const CvMat* _sample, const CvMat* _missing,
+                  CvMat* weak_responses, CvSlice slice,
+                  bool raw_mode, bool return_sum ) const
+{
+    float value = -FLT_MAX;
+
+    CvSeqReader reader;
+    double sum = 0;
+    int wstep = 0;
+    const float* sample_data;
+
+    if( !weak )
+        CV_Error( CV_StsError, "The boosted tree ensemble has not been trained yet" );
+
+    if( !CV_IS_MAT(_sample) || CV_MAT_TYPE(_sample->type) != CV_32FC1 ||
+        (_sample->cols != 1 && _sample->rows != 1) ||
+        (_sample->cols + _sample->rows - 1 != data->var_all && !raw_mode) ||
+        (active_vars && _sample->cols + _sample->rows - 1 != active_vars->cols && raw_mode) )
+            CV_Error( CV_StsBadArg,
+        "the input sample must be 1d floating-point vector with the same "
+        "number of elements as the total number of variables or "
+        "as the number of variables used for training" );
+
+    if( _missing )
+    {
+        if( !CV_IS_MAT(_missing) || !CV_IS_MASK_ARR(_missing) ||
+            !CV_ARE_SIZES_EQ(_missing, _sample) )
+            CV_Error( CV_StsBadArg,
+            "the missing data mask must be 8-bit vector of the same size as input sample" );
+    }
+
+    int i, weak_count = cvSliceLength( slice, weak );
+    if( weak_count >= weak->total )
+    {
+        weak_count = weak->total;
+        slice.start_index = 0;
+    }
+
+    if( weak_responses )
+    {
+        if( !CV_IS_MAT(weak_responses) ||
+            CV_MAT_TYPE(weak_responses->type) != CV_32FC1 ||
+            (weak_responses->cols != 1 && weak_responses->rows != 1) ||
+            weak_responses->cols + weak_responses->rows - 1 != weak_count )
+            CV_Error( CV_StsBadArg,
+            "The output matrix of weak classifier responses must be valid "
+            "floating-point vector of the same number of components as the length of input slice" );
+        wstep = CV_IS_MAT_CONT(weak_responses->type) ? 1 : weak_responses->step/sizeof(float);
+    }
+
+    int var_count = active_vars->cols;
+    const int* vtype = data->var_type->data.i;
+    const int* cmap = data->cat_map->data.i;
+    const int* cofs = data->cat_ofs->data.i;
+
+    cv::Mat sample = cv::cvarrToMat(_sample);
+    cv::Mat missing;
+    if(_missing)
+        missing = cv::cvarrToMat(_missing);
+
+    // if need, preprocess the input vector
+    if( !raw_mode )
+    {
+        int sstep, mstep = 0;
+        const float* src_sample;
+        const uchar* src_mask = 0;
+        float* dst_sample;
+        uchar* dst_mask;
+        const int* vidx = active_vars->data.i;
+        const int* vidx_abs = active_vars_abs->data.i;
+        bool have_mask = _missing != 0;
+
+        sample = cv::Mat(1, var_count, CV_32FC1);
+        missing = cv::Mat(1, var_count, CV_8UC1);
+
+        dst_sample = sample.ptr<float>();
+        dst_mask = missing.ptr<uchar>();
+
+        src_sample = _sample->data.fl;
+        sstep = CV_IS_MAT_CONT(_sample->type) ? 1 : _sample->step/sizeof(src_sample[0]);
+
+        if( _missing )
+        {
+            src_mask = _missing->data.ptr;
+            mstep = CV_IS_MAT_CONT(_missing->type) ? 1 : _missing->step;
+        }
+
+        for( i = 0; i < var_count; i++ )
+        {
+            int idx = vidx[i], idx_abs = vidx_abs[i];
+            float val = src_sample[idx_abs*sstep];
+            int ci = vtype[idx];
+            uchar m = src_mask ? src_mask[idx_abs*mstep] : (uchar)0;
+
+            if( ci >= 0 )
+            {
+                int a = cofs[ci], b = (ci+1 >= data->cat_ofs->cols) ? data->cat_map->cols : cofs[ci+1],
+                    c = a;
+                int ival = cvRound(val);
+                if ( (ival != val) && (!m) )
+                    CV_Error( CV_StsBadArg,
+                        "one of input categorical variable is not an integer" );
+
+                while( a < b )
+                {
+                    c = (a + b) >> 1;
+                    if( ival < cmap[c] )
+                        b = c;
+                    else if( ival > cmap[c] )
+                        a = c+1;
+                    else
+                        break;
+                }
+
+                if( c < 0 || ival != cmap[c] )
+                {
+                    m = 1;
+                    have_mask = true;
+                }
+                else
+                {
+                    val = (float)(c - cofs[ci]);
+                }
+            }
+
+            dst_sample[i] = val;
+            dst_mask[i] = m;
+        }
+
+        if( !have_mask )
+            missing.release();
+    }
+    else
+    {
+        if( !CV_IS_MAT_CONT(_sample->type & (_missing ? _missing->type : -1)) )
+            CV_Error( CV_StsBadArg, "In raw mode the input vectors must be continuous" );
+    }
+
+    cvStartReadSeq( weak, &reader );
+    cvSetSeqReaderPos( &reader, slice.start_index );
+
+    sample_data = sample.ptr<float>();
+
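+    // fast path: all active splits are on ordered variables, there are no missing values
+    // and no per-tree responses are requested, so each tree is traversed using only
+    // the primary split of every node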
+    if( !have_active_cat_vars && missing.empty() && !weak_responses )
+    {
+        for( i = 0; i < weak_count; i++ )
+        {
+            CvBoostTree* wtree;
+            const CvDTreeNode* node;
+            CV_READ_SEQ_ELEM( wtree, reader );
+
+            node = wtree->get_root();
+            while( node->left )
+            {
+                CvDTreeSplit* split = node->split;
+                int vi = split->condensed_idx;
+                float val = sample_data[vi];
+                int dir = val <= split->ord.c ? -1 : 1;
+                if( split->inversed )
+                    dir = -dir;
+                node = dir < 0 ? node->left : node->right;
+            }
+            sum += node->value;
+        }
+    }
+    else
+    {
+        const int* avars = active_vars->data.i;
+        const uchar* m = !missing.empty() ? missing.ptr<uchar>() : 0;
+
+        // full-featured version
+        for( i = 0; i < weak_count; i++ )
+        {
+            CvBoostTree* wtree;
+            const CvDTreeNode* node;
+            CV_READ_SEQ_ELEM( wtree, reader );
+
+            node = wtree->get_root();
+            while( node->left )
+            {
+                const CvDTreeSplit* split = node->split;
+                int dir = 0;
+                for( ; !dir && split != 0; split = split->next )
+                {
+                    int vi = split->condensed_idx;
+                    int ci = vtype[avars[vi]];
+                    float val = sample_data[vi];
+                    if( m && m[vi] )
+                        continue;
+                    if( ci < 0 ) // ordered
+                        dir = val <= split->ord.c ? -1 : 1;
+                    else // categorical
+                    {
+                        int c = cvRound(val);
+                        dir = CV_DTREE_CAT_DIR(c, split->subset);
+                    }
+                    if( split->inversed )
+                        dir = -dir;
+                }
+
+                if( !dir )
+                {
+                    int diff = node->right->sample_count - node->left->sample_count;
+                    dir = diff < 0 ? -1 : 1;
+                }
+                node = dir < 0 ? node->left : node->right;
+            }
+            if( weak_responses )
+                weak_responses->data.fl[i*wstep] = (float)node->value;
+            sum += node->value;
+        }
+    }
+
+    if( return_sum )
+        value = (float)sum;
+    else
+    {
+        int cls_idx = sum >= 0;
+        if( raw_mode )
+            value = (float)cls_idx;
+        else
+            value = (float)cmap[cofs[vtype[data->var_count]] + cls_idx];
+    }
+
+    return value;
+}
+
+float CvBoost::calc_error( CvMLData* _data, int type, std::vector<float> *resp )
+{
+    float err = 0;
+    const CvMat* values = _data->get_values();
+    const CvMat* response = _data->get_responses();
+    const CvMat* missing = _data->get_missing();
+    const CvMat* sample_idx = (type == CV_TEST_ERROR) ? _data->get_test_sample_idx() : _data->get_train_sample_idx();
+    const CvMat* var_types = _data->get_var_types();
+    int* sidx = sample_idx ? sample_idx->data.i : 0;
+    int r_step = CV_IS_MAT_CONT(response->type) ?
+                1 : response->step / CV_ELEM_SIZE(response->type);
+    bool is_classifier = var_types->data.ptr[var_types->cols-1] == CV_VAR_CATEGORICAL;
+    int sample_count = sample_idx ? sample_idx->cols : 0;
+    sample_count = (type == CV_TRAIN_ERROR && sample_count == 0) ? values->rows : sample_count;
+    float* pred_resp = 0;
+    if( resp && (sample_count > 0) )
+    {
+        resp->resize( sample_count );
+        pred_resp = &((*resp)[0]);
+    }
+    if ( is_classifier )
+    {
+        for( int i = 0; i < sample_count; i++ )
+        {
+            CvMat sample, miss;
+            int si = sidx ? sidx[i] : i;
+            cvGetRow( values, &sample, si );
+            if( missing )
+                cvGetRow( missing, &miss, si );
+            float r = (float)predict( &sample, missing ? &miss : 0 );
+            if( pred_resp )
+                pred_resp[i] = r;
+            int d = fabs((double)r - response->data.fl[si*r_step]) <= FLT_EPSILON ? 0 : 1;
+            err += d;
+        }
+        err = sample_count ? err / (float)sample_count * 100 : -FLT_MAX;
+    }
+    else
+    {
+        for( int i = 0; i < sample_count; i++ )
+        {
+            CvMat sample, miss;
+            int si = sidx ? sidx[i] : i;
+            cvGetRow( values, &sample, si );
+            if( missing )
+                cvGetRow( missing, &miss, si );
+            float r = (float)predict( &sample, missing ? &miss : 0 );
+            if( pred_resp )
+                pred_resp[i] = r;
+            float d = r - response->data.fl[si*r_step];
+            err += d*d;
+        }
+        err = sample_count ? err / (float)sample_count : -FLT_MAX;
+    }
+    return err;
+}
+
+void CvBoost::write_params( CvFileStorage* fs ) const
+{
+    const char* boost_type_str =
+        params.boost_type == DISCRETE ? "DiscreteAdaboost" :
+        params.boost_type == REAL ? "RealAdaboost" :
+        params.boost_type == LOGIT ? "LogitBoost" :
+        params.boost_type == GENTLE ? "GentleAdaboost" : 0;
+
+    const char* split_crit_str =
+        params.split_criteria == DEFAULT ? "Default" :
+        params.split_criteria == GINI ? "Gini" :
+        params.split_criteria == MISCLASS ? "Misclassification" :
+        params.split_criteria == SQERR ? "SquaredErr" : 0;
+
+    if( boost_type_str )
+        cvWriteString( fs, "boosting_type", boost_type_str );
+    else
+        cvWriteInt( fs, "boosting_type", params.boost_type );
+
+    if( split_crit_str )
+        cvWriteString( fs, "splitting_criteria", split_crit_str );
+    else
+        cvWriteInt( fs, "splitting_criteria", params.split_criteria );
+
+    cvWriteInt( fs, "ntrees", weak->total );
+    cvWriteReal( fs, "weight_trimming_rate", params.weight_trim_rate );
+
+    data->write_params( fs );
+}
+
+
+void CvBoost::read_params( CvFileStorage* fs, CvFileNode* fnode )
+{
+    CV_FUNCNAME( "CvBoost::read_params" );
+
+    __BEGIN__;
+
+    CvFileNode* temp;
+
+    if( !fnode || !CV_NODE_IS_MAP(fnode->tag) )
+        return;
+
+    data = new CvDTreeTrainData();
+    CV_CALL( data->read_params(fs, fnode));
+    data->shared = true;
+
+    params.max_depth = data->params.max_depth;
+    params.min_sample_count = data->params.min_sample_count;
+    params.max_categories = data->params.max_categories;
+    params.priors = data->params.priors;
+    params.regression_accuracy = data->params.regression_accuracy;
+    params.use_surrogates = data->params.use_surrogates;
+
+    temp = cvGetFileNodeByName( fs, fnode, "boosting_type" );
+    if( !temp )
+        return;
+
+    if( temp && CV_NODE_IS_STRING(temp->tag) )
+    {
+        const char* boost_type_str = cvReadString( temp, "" );
+        params.boost_type = strcmp( boost_type_str, "DiscreteAdaboost" ) == 0 ? DISCRETE :
+                            strcmp( boost_type_str, "RealAdaboost" ) == 0 ? REAL :
+                            strcmp( boost_type_str, "LogitBoost" ) == 0 ? LOGIT :
+                            strcmp( boost_type_str, "GentleAdaboost" ) == 0 ? GENTLE : -1;
+    }
+    else
+        params.boost_type = cvReadInt( temp, -1 );
+
+    if( params.boost_type < DISCRETE || params.boost_type > GENTLE )
+        CV_ERROR( CV_StsBadArg, "Unknown boosting type" );
+
+    temp = cvGetFileNodeByName( fs, fnode, "splitting_criteria" );
+    if( temp && CV_NODE_IS_STRING(temp->tag) )
+    {
+        const char* split_crit_str = cvReadString( temp, "" );
+        params.split_criteria = strcmp( split_crit_str, "Default" ) == 0 ? DEFAULT :
+                                strcmp( split_crit_str, "Gini" ) == 0 ? GINI :
+                                strcmp( split_crit_str, "Misclassification" ) == 0 ? MISCLASS :
+                                strcmp( split_crit_str, "SquaredErr" ) == 0 ? SQERR : -1;
+    }
+    else
+        params.split_criteria = cvReadInt( temp, -1 );
+
+    if( params.split_criteria < DEFAULT || params.split_criteria > SQERR )
+        CV_ERROR( CV_StsBadArg, "Unknown splitting criteria" );
+
+    params.weak_count = cvReadIntByName( fs, fnode, "ntrees" );
+    params.weight_trim_rate = cvReadRealByName( fs, fnode, "weight_trimming_rate", 0. );
+
+    __END__;
+}
+
+
+
+void
+CvBoost::read( CvFileStorage* fs, CvFileNode* node )
+{
+    CV_FUNCNAME( "CvBoost::read" );
+
+    __BEGIN__;
+
+    CvSeqReader reader;
+    CvFileNode* trees_fnode;
+    CvMemStorage* storage;
+    int i, ntrees;
+
+    clear();
+    read_params( fs, node );
+
+    if( !data )
+        EXIT;
+
+    trees_fnode = cvGetFileNodeByName( fs, node, "trees" );
+    if( !trees_fnode || !CV_NODE_IS_SEQ(trees_fnode->tag) )
+        CV_ERROR( CV_StsParseError, "<trees> tag is missing" );
+
+    cvStartReadSeq( trees_fnode->data.seq, &reader );
+    ntrees = trees_fnode->data.seq->total;
+
+    if( ntrees != params.weak_count )
+        CV_ERROR( CV_StsUnmatchedSizes,
+        "The number of trees stored does not match <ntrees> tag value" );
+
+    CV_CALL( storage = cvCreateMemStorage() );
+    weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
+
+    for( i = 0; i < ntrees; i++ )
+    {
+        CvBoostTree* tree = new CvBoostTree();
+        CV_CALL(tree->read( fs, (CvFileNode*)reader.ptr, this, data ));
+        CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+        cvSeqPush( weak, &tree );
+    }
+    get_active_vars();
+
+    __END__;
+}
+
+
+void
+CvBoost::write( CvFileStorage* fs, const char* name ) const
+{
+    CV_FUNCNAME( "CvBoost::write" );
+
+    __BEGIN__;
+
+    CvSeqReader reader;
+    int i;
+
+    cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_BOOSTING );
+
+    if( !weak )
+        CV_ERROR( CV_StsBadArg, "The classifier has not been trained yet" );
+
+    write_params( fs );
+    cvStartWriteStruct( fs, "trees", CV_NODE_SEQ );
+
+    cvStartReadSeq( weak, &reader );
+
+    for( i = 0; i < weak->total; i++ )
+    {
+        CvBoostTree* tree;
+        CV_READ_SEQ_ELEM( tree, reader );
+        cvStartWriteStruct( fs, 0, CV_NODE_MAP );
+        tree->write( fs );
+        cvEndWriteStruct( fs );
+    }
+
+    cvEndWriteStruct( fs );
+    cvEndWriteStruct( fs );
+
+    __END__;
+}
+
+
+CvMat*
+CvBoost::get_weights()
+{
+    return weights;
+}
+
+
+CvMat*
+CvBoost::get_subtree_weights()
+{
+    return subtree_weights;
+}
+
+
+CvMat*
+CvBoost::get_weak_response()
+{
+    return weak_eval;
+}
+
+
+const CvBoostParams&
+CvBoost::get_params() const
+{
+    return params;
+}
+
+CvSeq* CvBoost::get_weak_predictors()
+{
+    return weak;
+}
+
+const CvDTreeTrainData* CvBoost::get_data() const
+{
+    return data;
+}
+
+using namespace cv;
+
+CvBoost::CvBoost( const Mat& _train_data, int _tflag,
+               const Mat& _responses, const Mat& _var_idx,
+               const Mat& _sample_idx, const Mat& _var_type,
+               const Mat& _missing_mask,
+               CvBoostParams _params )
+{
+    weak = 0;
+    data = 0;
+    default_model_name = "my_boost_tree";
+    active_vars = active_vars_abs = orig_response = sum_response = weak_eval =
+        subsample_mask = weights = subtree_weights = 0;
+
+    train( _train_data, _tflag, _responses, _var_idx, _sample_idx,
+          _var_type, _missing_mask, _params );
+}
+
+
+bool
+CvBoost::train( const Mat& _train_data, int _tflag,
+               const Mat& _responses, const Mat& _var_idx,
+               const Mat& _sample_idx, const Mat& _var_type,
+               const Mat& _missing_mask,
+               CvBoostParams _params, bool _update )
+{
+    train_data_hdr = _train_data;
+    train_data_mat = _train_data;
+    responses_hdr = _responses;
+    responses_mat = _responses;
+
+    CvMat vidx = _var_idx, sidx = _sample_idx, vtype = _var_type, mmask = _missing_mask;
+
+    return train(&train_data_hdr, _tflag, &responses_hdr, vidx.data.ptr ? &vidx : 0,
+          sidx.data.ptr ? &sidx : 0, vtype.data.ptr ? &vtype : 0,
+          mmask.data.ptr ? &mmask : 0, _params, _update);
+}
+
+float
+CvBoost::predict( const Mat& _sample, const Mat& _missing,
+                  const Range& slice, bool raw_mode, bool return_sum ) const
+{
+    CvMat sample = _sample, mmask = _missing;
+    /*if( weak_responses )
+    {
+        int weak_count = cvSliceLength( slice, weak );
+        if( weak_count >= weak->total )
+        {
+            weak_count = weak->total;
+            slice.start_index = 0;
+        }
+
+        if( !(weak_responses->data && weak_responses->type() == CV_32FC1 &&
+              (weak_responses->cols == 1 || weak_responses->rows == 1) &&
+              weak_responses->cols + weak_responses->rows - 1 == weak_count) )
+            weak_responses->create(weak_count, 1, CV_32FC1);
+        pwr = &(wr = *weak_responses);
+    }*/
+    return predict(&sample, _missing.empty() ? 0 : &mmask, 0,
+                   slice == Range::all() ? CV_WHOLE_SEQ : cvSlice(slice.start, slice.end),
+                   raw_mode, return_sum);
+}
+
+/* End of file. */
diff --git a/apps/traincascade/old_ml_data.cpp b/apps/traincascade/old_ml_data.cpp
new file mode 100644 (file)
index 0000000..d221dcb
--- /dev/null
+++ b/apps/traincascade/old_ml_data.cpp
@@ -0,0 +1,792 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#include "old_ml_precomp.hpp"
+#include <ctype.h>
+
+#define MISS_VAL    FLT_MAX
+#define CV_VAR_MISS    0
+
+CvTrainTestSplit::CvTrainTestSplit()
+{
+    train_sample_part_mode = CV_COUNT;
+    train_sample_part.count = -1;
+    mix = false;
+}
+
+CvTrainTestSplit::CvTrainTestSplit( int _train_sample_count, bool _mix )
+{
+    train_sample_part_mode = CV_COUNT;
+    train_sample_part.count = _train_sample_count;
+    mix = _mix;
+}
+
+CvTrainTestSplit::CvTrainTestSplit( float _train_sample_portion, bool _mix )
+{
+    train_sample_part_mode = CV_PORTION;
+    train_sample_part.portion = _train_sample_portion;
+    mix = _mix;
+}
+
+////////////////
+
+CvMLData::CvMLData()
+{
+    values = missing = var_types = var_idx_mask = response_out = var_idx_out = var_types_out = 0;
+    train_sample_idx = test_sample_idx = 0;
+    header_lines_number = 0;
+    sample_idx = 0;
+    response_idx = -1;
+
+    train_sample_count = -1;
+
+    delimiter = ',';
+    miss_ch = '?';
+    //flt_separator = '.';
+
+    rng = &cv::theRNG();
+}
+
+CvMLData::~CvMLData()
+{
+    clear();
+}
+
+void CvMLData::free_train_test_idx()
+{
+    cvReleaseMat( &train_sample_idx );
+    cvReleaseMat( &test_sample_idx );
+    sample_idx = 0;
+}
+
+void CvMLData::clear()
+{
+    class_map.clear();
+
+    cvReleaseMat( &values );
+    cvReleaseMat( &missing );
+    cvReleaseMat( &var_types );
+    cvReleaseMat( &var_idx_mask );
+
+    cvReleaseMat( &response_out );
+    cvReleaseMat( &var_idx_out );
+    cvReleaseMat( &var_types_out );
+
+    free_train_test_idx();
+
+    total_class_count = 0;
+
+    response_idx = -1;
+
+    train_sample_count = -1;
+}
+
+
+void CvMLData::set_header_lines_number( int idx )
+{
+    header_lines_number = std::max(0, idx);
+}
+
+int CvMLData::get_header_lines_number() const
+{
+    return header_lines_number;
+}
+
+static char *fgets_chomp(char *str, int n, FILE *stream)
+{
+    char *head = fgets(str, n, stream);
+    if( head )
+    {
+        for(char *tail = head + strlen(head) - 1; tail >= head; --tail)
+        {
+            if( *tail != '\r'  && *tail != '\n' )
+                break;
+            *tail = '\0';
+        }
+    }
+    return head;
+}
+
+
+int CvMLData::read_csv(const char* filename)
+{
+    const int M = 1000000;
+    const char str_delimiter[3] = { ' ', delimiter, '\0' };
+    FILE* file = 0;
+    CvMemStorage* storage;
+    CvSeq* seq;
+    char *ptr;
+    float* el_ptr;
+    CvSeqReader reader;
+    int cols_count = 0;
+    uchar *var_types_ptr = 0;
+
+    clear();
+
+    file = fopen( filename, "rt" );
+
+    if( !file )
+        return -1;
+
+    std::vector<char> _buf(M);
+    char* buf = &_buf[0];
+
+    // skip header lines
+    for( int i = 0; i < header_lines_number; i++ )
+    {
+        if( fgets( buf, M, file ) == 0 )
+        {
+            fclose(file);
+            return -1;
+        }
+    }
+
+    // read the first data line and determine the number of variables
+    if( !fgets_chomp( buf, M, file ))
+    {
+        fclose(file);
+        return -1;
+    }
+
+    ptr = buf;
+    while( *ptr == ' ' )
+        ptr++;
+    for( ; *ptr != '\0'; )
+    {
+        if(*ptr == delimiter || *ptr == ' ')
+        {
+            cols_count++;
+            ptr++;
+            while( *ptr == ' ' ) ptr++;
+        }
+        else
+            ptr++;
+    }
+
+    cols_count++;
+
+    if ( cols_count == 0)
+    {
+        fclose(file);
+        return -1;
+    }
+
+    // create temporary memory storage to store the whole database
+    el_ptr = new float[cols_count];
+    storage = cvCreateMemStorage();
+    seq = cvCreateSeq( 0, sizeof(*seq), cols_count*sizeof(float), storage );
+
+    var_types = cvCreateMat( 1, cols_count, CV_8U );
+    cvZero( var_types );
+    var_types_ptr = var_types->data.ptr;
+
+    for(;;)
+    {
+        char *token = NULL;
+        int type;
+        token = strtok(buf, str_delimiter);
+        if (!token)
+            break;
+        for (int i = 0; i < cols_count-1; i++)
+        {
+            str_to_flt_elem( token, el_ptr[i], type);
+            var_types_ptr[i] |= type;
+            token = strtok(NULL, str_delimiter);
+            if (!token)
+            {
+                fclose(file);
+                delete [] el_ptr;
+                return -1;
+            }
+        }
+        str_to_flt_elem( token, el_ptr[cols_count-1], type);
+        var_types_ptr[cols_count-1] |= type;
+        cvSeqPush( seq, el_ptr );
+        if( !fgets_chomp( buf, M, file ) )
+            break;
+    }
+    fclose(file);
+
+    values = cvCreateMat( seq->total, cols_count, CV_32FC1 );
+    missing = cvCreateMat( seq->total, cols_count, CV_8U );
+    var_idx_mask = cvCreateMat( 1, values->cols, CV_8UC1 );
+    cvSet( var_idx_mask, cvRealScalar(1) );
+    train_sample_count = seq->total;
+
+    cvStartReadSeq( seq, &reader );
+    for(int i = 0; i < seq->total; i++ )
+    {
+        const float* sdata = (float*)reader.ptr;
+        float* ddata = values->data.fl + cols_count*i;
+        uchar* dm = missing->data.ptr + cols_count*i;
+
+        for( int j = 0; j < cols_count; j++ )
+        {
+            ddata[j] = sdata[j];
+            dm[j] = ( fabs( MISS_VAL - sdata[j] ) <= FLT_EPSILON );
+        }
+        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
+    }
+
+    if ( cvNorm( missing, 0, CV_L1 ) <= FLT_EPSILON )
+        cvReleaseMat( &missing );
+
+    cvReleaseMemStorage( &storage );
+    delete []el_ptr;
+    return 0;
+}
+
+const CvMat* CvMLData::get_values() const
+{
+    return values;
+}
+
+const CvMat* CvMLData::get_missing() const
+{
+    CV_FUNCNAME( "CvMLData::get_missing" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    __END__;
+
+    return missing;
+}
+
+const std::map<cv::String, int>& CvMLData::get_class_labels_map() const
+{
+    return class_map;
+}
+
+void CvMLData::str_to_flt_elem( const char* token, float& flt_elem, int& type)
+{
+
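+    // a numeric token becomes an ordered value; a token consisting of the single miss_ch
+    // character marks a missing value; any other non-numeric token is treated as a class
+    // label and is mapped to a 1-based index via class_map (categorical value)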
+    char* stopstring = NULL;
+    flt_elem = (float)strtod( token, &stopstring );
+    assert( stopstring );
+    type = CV_VAR_ORDERED;
+    if ( *stopstring == miss_ch && strlen(stopstring) == 1 ) // missed value
+    {
+        flt_elem = MISS_VAL;
+        type = CV_VAR_MISS;
+    }
+    else
+    {
+        if ( (*stopstring != 0) && (*stopstring != '\n') && (strcmp(stopstring, "\r\n") != 0) ) // class label
+        {
+            int idx = class_map[token];
+            if ( idx == 0)
+            {
+                total_class_count++;
+                idx = total_class_count;
+                class_map[token] = idx;
+            }
+            flt_elem = (float)idx;
+            type = CV_VAR_CATEGORICAL;
+        }
+    }
+}
+
+void CvMLData::set_delimiter(char ch)
+{
+    CV_FUNCNAME( "CvMLData::set_delimited" );
+    __BEGIN__;
+
+    if (ch == miss_ch /*|| ch == flt_separator*/)
+        CV_ERROR(CV_StsBadArg, "delimited, miss_character and flt_separator must be different");
+
+    delimiter = ch;
+
+    __END__;
+}
+
+char CvMLData::get_delimiter() const
+{
+    return delimiter;
+}
+
+void CvMLData::set_miss_ch(char ch)
+{
+    CV_FUNCNAME( "CvMLData::set_miss_ch" );
+    __BEGIN__;
+
+    if (ch == delimiter/* || ch == flt_separator*/)
+        CV_ERROR(CV_StsBadArg, "delimited, miss_character and flt_separator must be different");
+
+    miss_ch = ch;
+
+    __END__;
+}
+
+char CvMLData::get_miss_ch() const
+{
+    return miss_ch;
+}
+
+void CvMLData::set_response_idx( int idx )
+{
+    CV_FUNCNAME( "CvMLData::set_response_idx" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    if ( idx >= values->cols)
+        CV_ERROR( CV_StsBadArg, "idx value is not correct" );
+
+    if ( response_idx >= 0 )
+        chahge_var_idx( response_idx, true );
+    if ( idx >= 0 )
+        chahge_var_idx( idx, false );
+    response_idx = idx;
+
+    __END__;
+}
+
+int CvMLData::get_response_idx() const
+{
+    CV_FUNCNAME( "CvMLData::get_response_idx" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+     __END__;
+    return response_idx;
+}
+
+void CvMLData::change_var_type( int var_idx, int type )
+{
+    CV_FUNCNAME( "CvMLData::change_var_type" );
+    __BEGIN__;
+
+    int var_count = 0;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+     var_count = values->cols;
+
+    if ( var_idx < 0 || var_idx >= var_count)
+        CV_ERROR( CV_StsBadArg, "var_idx is not correct" );
+
+    if ( type != CV_VAR_ORDERED && type != CV_VAR_CATEGORICAL)
+         CV_ERROR( CV_StsBadArg, "type is not correct" );
+
+    assert( var_types );
+    if ( var_types->data.ptr[var_idx] == CV_VAR_CATEGORICAL && type == CV_VAR_ORDERED)
+        CV_ERROR( CV_StsBadArg, "it`s impossible to assign CV_VAR_ORDERED type to categorical variable" );
+    var_types->data.ptr[var_idx] = (uchar)type;
+
+    __END__;
+
+    return;
+}
+
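+// the types string is either "ord" or "cat" alone (all variables get that type), or contains
+// bracketed index lists such as "ord[0-4,6],cat[5,7]", where '-' denotes an inclusive range
+// (the indices here are just an example)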
+void CvMLData::set_var_types( const char* str )
+{
+    CV_FUNCNAME( "CvMLData::set_var_types" );
+    __BEGIN__;
+
+    const char* ord = 0, *cat = 0;
+    int var_count = 0, set_var_type_count = 0;
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    var_count = values->cols;
+
+    assert( var_types );
+
+    ord = strstr( str, "ord" );
+    cat = strstr( str, "cat" );
+    if ( !ord && !cat )
+        CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+    if ( !ord && strlen(cat) == 3 ) // str == "cat"
+    {
+        cvSet( var_types, cvScalarAll(CV_VAR_CATEGORICAL) );
+        return;
+    }
+
+    if ( !cat && strlen(ord) == 3 ) // str == "ord"
+    {
+        cvSet( var_types, cvScalarAll(CV_VAR_ORDERED) );
+        return;
+    }
+
+    if ( ord ) // parse ord str
+    {
+        char* stopstring = NULL;
+        if ( ord[3] != '[')
+            CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+        ord += 4; // pass "ord["
+        do
+        {
+            int b1 = (int)strtod( ord, &stopstring );
+            if ( *stopstring == 0 || (*stopstring != ',' && *stopstring != ']' && *stopstring != '-') )
+                CV_ERROR( CV_StsBadArg, "types string is not correct" );
+            ord = stopstring + 1;
+            if ( (stopstring[0] == ',') || (stopstring[0] == ']'))
+            {
+                if ( var_types->data.ptr[b1] == CV_VAR_CATEGORICAL)
+                    CV_ERROR( CV_StsBadArg, "it`s impossible to assign CV_VAR_ORDERED type to categorical variable" );
+                var_types->data.ptr[b1] = CV_VAR_ORDERED;
+                set_var_type_count++;
+            }
+            else
+            {
+                if ( stopstring[0] == '-')
+                {
+                    int b2 = (int)strtod( ord, &stopstring);
+                    if ( (*stopstring == 0) || (*stopstring != ',' && *stopstring != ']') )
+                        CV_ERROR( CV_StsBadArg, "types string is not correct" );
+                    ord = stopstring + 1;
+                    for (int i = b1; i <= b2; i++)
+                    {
+                        if ( var_types->data.ptr[i] == CV_VAR_CATEGORICAL)
+                            CV_ERROR( CV_StsBadArg, "it`s impossible to assign CV_VAR_ORDERED type to categorical variable" );
+                        var_types->data.ptr[i] = CV_VAR_ORDERED;
+                    }
+                    set_var_type_count += b2 - b1 + 1;
+                }
+                else
+                    CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+            }
+        }
+        while (*stopstring != ']');
+
+        if ( stopstring[1] != '\0' && stopstring[1] != ',')
+            CV_ERROR( CV_StsBadArg, "types string is not correct" );
+    }
+
+    if ( cat ) // parse cat str
+    {
+        char* stopstring = NULL;
+        if ( cat[3] != '[')
+            CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+        cat += 4; // pass "cat["
+        do
+        {
+            int b1 = (int)strtod( cat, &stopstring );
+            if ( *stopstring == 0 || (*stopstring != ',' && *stopstring != ']' && *stopstring != '-') )
+                CV_ERROR( CV_StsBadArg, "types string is not correct" );
+            cat = stopstring + 1;
+            if ( (stopstring[0] == ',') || (stopstring[0] == ']'))
+            {
+                var_types->data.ptr[b1] = CV_VAR_CATEGORICAL;
+                set_var_type_count++;
+            }
+            else
+            {
+                if ( stopstring[0] == '-')
+                {
+                    int b2 = (int)strtod( cat, &stopstring);
+                    if ( (*stopstring == 0) || (*stopstring != ',' && *stopstring != ']') )
+                        CV_ERROR( CV_StsBadArg, "types string is not correct" );
+                    cat = stopstring + 1;
+                    for (int i = b1; i <= b2; i++)
+                        var_types->data.ptr[i] = CV_VAR_CATEGORICAL;
+                    set_var_type_count += b2 - b1 + 1;
+                }
+                else
+                    CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+            }
+        }
+        while (*stopstring != ']');
+
+        if ( stopstring[1] != '\0' && stopstring[1] != ',')
+            CV_ERROR( CV_StsBadArg, "types string is not correct" );
+    }
+
+    if (set_var_type_count != var_count)
+        CV_ERROR( CV_StsBadArg, "types string is not correct" );
+
+     __END__;
+}
+
+const CvMat* CvMLData::get_var_types()
+{
+    CV_FUNCNAME( "CvMLData::get_var_types" );
+    __BEGIN__;
+
+    uchar *var_types_out_ptr = 0;
+    int avcount, vt_size;
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    assert( var_idx_mask );
+
+    avcount = cvFloor( cvNorm( var_idx_mask, 0, CV_L1 ) );
+    vt_size = avcount + (response_idx >= 0);
+
+    if ( avcount == values->cols || (avcount == values->cols-1 && response_idx == values->cols-1) )
+        return var_types;
+
+    if ( !var_types_out || ( var_types_out && var_types_out->cols != vt_size ) )
+    {
+        cvReleaseMat( &var_types_out );
+        var_types_out = cvCreateMat( 1, vt_size, CV_8UC1 );
+    }
+
+    var_types_out_ptr = var_types_out->data.ptr;
+    for( int i = 0; i < var_types->cols; i++)
+    {
+        if (i == response_idx || !var_idx_mask->data.ptr[i]) continue;
+        *var_types_out_ptr = var_types->data.ptr[i];
+        var_types_out_ptr++;
+    }
+    if ( response_idx >= 0 )
+        *var_types_out_ptr = var_types->data.ptr[response_idx];
+
+    __END__;
+
+    return var_types_out;
+}
+
+int CvMLData::get_var_type( int var_idx ) const
+{
+    return var_types->data.ptr[var_idx];
+}
+
+const CvMat* CvMLData::get_responses()
+{
+    CV_FUNCNAME( "CvMLData::get_responses" );
+    __BEGIN__;
+
+    int var_count = 0;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+    var_count = values->cols;
+
+    if ( response_idx < 0 || response_idx >= var_count )
+       return 0;
+    if ( !response_out )
+        response_out = cvCreateMatHeader( values->rows, 1, CV_32FC1 );
+    else
+        cvInitMatHeader( response_out, values->rows, 1, CV_32FC1);
+    cvGetCol( values, response_out, response_idx );
+
+    __END__;
+
+    return response_out;
+}
+
+void CvMLData::set_train_test_split( const CvTrainTestSplit * spl)
+{
+    CV_FUNCNAME( "CvMLData::set_train_test_split" );
+    __BEGIN__;
+
+    int sample_count = 0;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    sample_count = values->rows;
+
+    float train_sample_portion;
+
+    if (spl->train_sample_part_mode == CV_COUNT)
+    {
+        train_sample_count = spl->train_sample_part.count;
+        if (train_sample_count > sample_count)
+            CV_ERROR( CV_StsBadArg, "train samples count is not correct" );
+        train_sample_count = train_sample_count<=0 ? sample_count : train_sample_count;
+    }
+    else // spl->train_sample_part_mode == CV_PORTION
+    {
+        train_sample_portion = spl->train_sample_part.portion;
+        if ( train_sample_portion > 1)
+            CV_ERROR( CV_StsBadArg, "train samples count is not correct" );
+        train_sample_portion = train_sample_portion <= FLT_EPSILON ||
+            1 - train_sample_portion <= FLT_EPSILON ? 1 : train_sample_portion;
+        train_sample_count = std::max(1, cvFloor( train_sample_portion * sample_count ));
+    }
+
+    if ( train_sample_count == sample_count )
+    {
+        free_train_test_idx();
+        return;
+    }
+
+    if ( train_sample_idx && train_sample_idx->cols != train_sample_count )
+        free_train_test_idx();
+
+    if ( !sample_idx)
+    {
+        int test_sample_count = sample_count - train_sample_count;
+        sample_idx = (int*)cvAlloc( sample_count * sizeof(sample_idx[0]) );
+        for (int i = 0; i < sample_count; i++ )
+            sample_idx[i] = i;
+        train_sample_idx = cvCreateMatHeader( 1, train_sample_count, CV_32SC1 );
+        *train_sample_idx = cvMat( 1, train_sample_count, CV_32SC1, &sample_idx[0] );
+
+        CV_Assert(test_sample_count > 0);
+        test_sample_idx = cvCreateMatHeader( 1, test_sample_count, CV_32SC1 );
+        *test_sample_idx = cvMat( 1, test_sample_count, CV_32SC1, &sample_idx[train_sample_count] );
+    }
+
+    mix = spl->mix;
+    if ( mix )
+        mix_train_and_test_idx();
+
+    __END__;
+}
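+
+/* Illustrative sketch (not part of the build; the data object, file name and the
+   read_csv call are assumptions made for the example): reserving 80% of the
+   samples for training and shuffling the resulting train/test indices.
+
+   CvMLData data;
+   data.read_csv( "samples.csv" );
+   CvTrainTestSplit spl;
+   spl.train_sample_part_mode = CV_PORTION;    // split by fraction rather than by count
+   spl.train_sample_part.portion = 0.8f;
+   spl.mix = true;                             // shuffle the indices after splitting
+   data.set_train_test_split( &spl );
+   const CvMat* train_idx = data.get_train_sample_idx();
+   const CvMat* test_idx  = data.get_test_sample_idx();
+*/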
+
+const CvMat* CvMLData::get_train_sample_idx() const
+{
+    CV_FUNCNAME( "CvMLData::get_train_sample_idx" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+    __END__;
+
+    return train_sample_idx;
+}
+
+const CvMat* CvMLData::get_test_sample_idx() const
+{
+    CV_FUNCNAME( "CvMLData::get_test_sample_idx" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+    __END__;
+
+    return test_sample_idx;
+}
+
+void CvMLData::mix_train_and_test_idx()
+{
+    CV_FUNCNAME( "CvMLData::mix_train_and_test_idx" );
+    __BEGIN__;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+    __END__;
+
+    if ( !sample_idx)
+        return;
+
+    if ( train_sample_count > 0 && train_sample_count < values->rows )
+    {
+        int n = values->rows;
+        for (int i = 0; i < n; i++)
+        {
+            int a = (*rng)(n);
+            int b = (*rng)(n);
+            int t;
+            CV_SWAP( sample_idx[a], sample_idx[b], t );
+        }
+    }
+}
+
+const CvMat* CvMLData::get_var_idx()
+{
+     CV_FUNCNAME( "CvMLData::get_var_idx" );
+    __BEGIN__;
+
+    int avcount = 0;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    assert( var_idx_mask );
+
+    avcount = cvFloor( cvNorm( var_idx_mask, 0, CV_L1 ) );
+    int* vidx;
+
+    if ( avcount == values->cols )
+        return 0;
+
+    if ( !var_idx_out || ( var_idx_out && var_idx_out->cols != avcount ) )
+    {
+        cvReleaseMat( &var_idx_out );
+        var_idx_out = cvCreateMat( 1, avcount, CV_32SC1);
+        if ( response_idx >=0 )
+            var_idx_mask->data.ptr[response_idx] = 0;
+    }
+
+    vidx = var_idx_out->data.i;
+
+    for(int i = 0; i < var_idx_mask->cols; i++)
+        if ( var_idx_mask->data.ptr[i] )
+        {
+            *vidx = i;
+            vidx++;
+        }
+
+    __END__;
+
+    return var_idx_out;
+}
+
+// misspelled name kept for source-level backward compatibility; use change_var_idx()
+void CvMLData::chahge_var_idx( int vi, bool state )
+{
+    change_var_idx( vi, state );
+}
+
+void CvMLData::change_var_idx( int vi, bool state )
+{
+     CV_FUNCNAME( "CvMLData::change_var_idx" );
+    __BEGIN__;
+
+    int var_count = 0;
+
+    if ( !values )
+        CV_ERROR( CV_StsInternal, "data is empty" );
+
+    var_count = values->cols;
+
+    if ( vi < 0 || vi >= var_count)
+        CV_ERROR( CV_StsBadArg, "variable index is not correct" );
+
+    assert( var_idx_mask );
+    var_idx_mask->data.ptr[vi] = state;
+
+    __END__;
+}
+
+/* End of file. */
diff --git a/apps/traincascade/old_ml_inner_functions.cpp b/apps/traincascade/old_ml_inner_functions.cpp
new file mode 100644 (file)
index 0000000..10b43f9
--- /dev/null
@@ -0,0 +1,1879 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#include "old_ml_precomp.hpp"
+
+
+CvStatModel::CvStatModel()
+{
+    default_model_name = "my_stat_model";
+}
+
+
+CvStatModel::~CvStatModel()
+{
+    clear();
+}
+
+
+void CvStatModel::clear()
+{
+}
+
+
+void CvStatModel::save( const char* filename, const char* name ) const
+{
+    CvFileStorage* fs = 0;
+
+    CV_FUNCNAME( "CvStatModel::save" );
+
+    __BEGIN__;
+
+    CV_CALL( fs = cvOpenFileStorage( filename, 0, CV_STORAGE_WRITE ));
+    if( !fs )
+        CV_ERROR( CV_StsError, "Could not open the file storage. Check the path and permissions" );
+
+    write( fs, name ? name : default_model_name );
+
+    __END__;
+
+    cvReleaseFileStorage( &fs );
+}
+
+
+void CvStatModel::load( const char* filename, const char* name )
+{
+    CvFileStorage* fs = 0;
+
+    CV_FUNCNAME( "CvStatModel::load" );
+
+    __BEGIN__;
+
+    CvFileNode* model_node = 0;
+
+    CV_CALL( fs = cvOpenFileStorage( filename, 0, CV_STORAGE_READ ));
+    if( !fs )
+        EXIT;
+
+    if( name )
+        model_node = cvGetFileNodeByName( fs, 0, name );
+    else
+    {
+        CvFileNode* root = cvGetRootFileNode( fs );
+        if( root->data.seq->total > 0 )
+            model_node = (CvFileNode*)cvGetSeqElem( root->data.seq, 0 );
+    }
+
+    read( fs, model_node );
+
+    __END__;
+
+    cvReleaseFileStorage( &fs );
+}
+
+
+void CvStatModel::write( CvFileStorage*, const char* ) const
+{
+    OPENCV_ERROR( CV_StsNotImplemented, "CvStatModel::write", "" );
+}
+
+
+void CvStatModel::read( CvFileStorage*, CvFileNode* )
+{
+    OPENCV_ERROR( CV_StsNotImplemented, "CvStatModel::read", "" );
+}
+
+
+/* Calculates the upper triangular matrix S of the Cholesky factorization A = S'*S, where A is a symmetric positive-definite matrix */
+static void cvChol( CvMat* A, CvMat* S )
+{
+    int dim = A->rows;
+
+    int i, j, k;
+    float sum;
+
+    for( i = 0; i < dim; i++ )
+    {
+        for( j = 0; j < i; j++ )
+            CV_MAT_ELEM(*S, float, i, j) = 0;
+
+        sum = 0;
+        for( k = 0; k < i; k++ )
+            sum += CV_MAT_ELEM(*S, float, k, i) * CV_MAT_ELEM(*S, float, k, i);
+
+        CV_MAT_ELEM(*S, float, i, i) = (float)sqrt(CV_MAT_ELEM(*A, float, i, i) - sum);
+
+        for( j = i + 1; j < dim; j++ )
+        {
+            sum = 0;
+            for( k = 0; k < i; k++ )
+                sum += CV_MAT_ELEM(*S, float, k, i) * CV_MAT_ELEM(*S, float, k, j);
+
+            CV_MAT_ELEM(*S, float, i, j) =
+                (CV_MAT_ELEM(*A, float, i, j) - sum) / CV_MAT_ELEM(*S, float, i, i);
+
+        }
+    }
+}
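+
+/* For reference, the recurrence implemented above (the standard Cholesky factorization,
+   assuming A is symmetric positive-definite so the square roots stay real):
+
+       S(i,i) = sqrt( A(i,i) - sum_{k<i} S(k,i)^2 )
+       S(i,j) = ( A(i,j) - sum_{k<i} S(k,i)*S(k,j) ) / S(i,i),   for j > i
+       S(i,j) = 0,                                               for j < i
+*/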
+
+/* Generates <sample> from a multivariate normal distribution, where <mean> is the
+   mean row vector and <cov> is the symmetric covariance matrix */
+CV_IMPL void cvRandMVNormal( CvMat* mean, CvMat* cov, CvMat* sample, CvRNG* rng )
+{
+    int dim = sample->cols;
+    int amount = sample->rows;
+
+    CvRNG state = rng ? *rng : cvRNG( cvGetTickCount() );
+    cvRandArr(&state, sample, CV_RAND_NORMAL, cvScalarAll(0), cvScalarAll(1) );
+
+    CvMat* utmat = cvCreateMat(dim, dim, sample->type);
+    CvMat* vect = cvCreateMatHeader(1, dim, sample->type);
+
+    cvChol(cov, utmat);
+
+    int i;
+    for( i = 0; i < amount; i++ )
+    {
+        cvGetRow(sample, vect, i);
+        cvMatMulAdd(vect, utmat, mean, vect);
+    }
+
+    cvReleaseMat(&vect);
+    cvReleaseMat(&utmat);
+}
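+
+/* Illustrative sketch (not compiled; the matrices and their sizes are assumptions
+   made for the example): drawing 100 two-dimensional samples.
+
+   float mean_data[] = { 1.f, 2.f };
+   float cov_data[]  = { 2.0f, 0.5f,
+                         0.5f, 1.0f };          // must be symmetric positive-definite
+   CvMat mean = cvMat( 1, 2, CV_32FC1, mean_data );
+   CvMat cov  = cvMat( 2, 2, CV_32FC1, cov_data );
+   CvMat* sample = cvCreateMat( 100, 2, CV_32FC1 );
+   cvRandMVNormal( &mean, &cov, sample, 0 );    // NULL rng falls back to the tick counter
+   cvReleaseMat( &sample );
+*/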
+
+
+/* Generates <sample> of <amount> points from a discrete random variate xi,
+   where Pr{xi = k} == probs[k], 0 <= k <= len - 1. */
+static void cvRandSeries( float probs[], int len, int sample[], int amount )
+{
+    CvMat* univals = cvCreateMat(1, amount, CV_32FC1);
+    float* knots = (float*)cvAlloc( len * sizeof(float) );
+
+    int i, j;
+
+    CvRNG state = cvRNG(-1);
+    cvRandArr(&state, univals, CV_RAND_UNI, cvScalarAll(0), cvScalarAll(1) );
+
+    knots[0] = probs[0];
+    for( i = 1; i < len; i++ )
+        knots[i] = knots[i - 1] + probs[i];
+
+    for( i = 0; i < amount; i++ )
+        for( j = 0; j < len; j++ )
+        {
+            if ( CV_MAT_ELEM(*univals, float, 0, i) <= knots[j] )
+            {
+                sample[i] = j;
+                break;
+            }
+        }
+
+    cvReleaseMat(&univals);  // release the temporary matrix of uniform draws
+    cvFree(&knots);
+}
+
+/* Generates <sample> from a Gaussian mixture distribution */
+CV_IMPL void cvRandGaussMixture( CvMat* means[],
+                                 CvMat* covs[],
+                                 float weights[],
+                                 int clsnum,
+                                 CvMat* sample,
+                                 CvMat* sampClasses )
+{
+    int dim = sample->cols;
+    int amount = sample->rows;
+
+    int i, clss;
+
+    int* sample_clsnum = (int*)cvAlloc( amount * sizeof(int) );
+    CvMat** utmats = (CvMat**)cvAlloc( clsnum * sizeof(CvMat*) );
+    CvMat* vect = cvCreateMatHeader(1, dim, CV_32FC1);
+
+    CvMat* classes;
+    if( sampClasses )
+        classes = sampClasses;
+    else
+        classes = cvCreateMat(1, amount, CV_32FC1);
+
+    CvRNG state = cvRNG(-1);
+    cvRandArr(&state, sample, CV_RAND_NORMAL, cvScalarAll(0), cvScalarAll(1));
+
+    cvRandSeries(weights, clsnum, sample_clsnum, amount);
+
+    for( i = 0; i < clsnum; i++ )
+    {
+        utmats[i] = cvCreateMat(dim, dim, CV_32FC1);
+        cvChol(covs[i], utmats[i]);
+    }
+
+    for( i = 0; i < amount; i++ )
+    {
+        CV_MAT_ELEM(*classes, float, 0, i) = (float)sample_clsnum[i];
+        cvGetRow(sample, vect, i);
+        clss = sample_clsnum[i];
+        cvMatMulAdd(vect, utmats[clss], means[clss], vect);
+    }
+
+    if( !sampClasses )
+        cvReleaseMat(&classes);
+    for( i = 0; i < clsnum; i++ )
+        cvReleaseMat(&utmats[i]);
+    cvFree(&utmats);
+    cvFree(&sample_clsnum);
+    cvReleaseMat(&vect);
+}
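+
+/* Illustrative sketch (not compiled; all values below are made up for the example):
+   a two-component 2-D mixture with weights 0.3 / 0.7.
+
+   float m0[] = { 0.f, 0.f },  m1[] = { 5.f, 5.f };
+   float c0[] = { 1.f, 0.f, 0.f, 1.f };
+   float c1[] = { 2.f, 0.f, 0.f, 2.f };
+   CvMat mean0 = cvMat( 1, 2, CV_32FC1, m0 ), mean1 = cvMat( 1, 2, CV_32FC1, m1 );
+   CvMat cov0  = cvMat( 2, 2, CV_32FC1, c0 ), cov1  = cvMat( 2, 2, CV_32FC1, c1 );
+   CvMat* means[] = { &mean0, &mean1 };
+   CvMat* covs[]  = { &cov0, &cov1 };
+   float weights[] = { 0.3f, 0.7f };                   // should sum to 1
+   CvMat* sample = cvCreateMat( 500, 2, CV_32FC1 );
+   CvMat* labels = cvCreateMat( 1, 500, CV_32FC1 );    // component index of each sample
+   cvRandGaussMixture( means, covs, weights, 2, sample, labels );
+*/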
+
+
+CvMat* icvGenerateRandomClusterCenters ( int seed, const CvMat* data,
+                                         int num_of_clusters, CvMat* _centers )
+{
+    CvMat* centers = _centers;
+
+    CV_FUNCNAME("icvGenerateRandomClusterCenters");
+    __BEGIN__;
+
+    CvRNG rng;
+    CvMat data_comp, centers_comp;
+    CvPoint minLoc, maxLoc; // Not used, just for function "cvMinMaxLoc"
+    double minVal, maxVal;
+    int i;
+    int dim = data ? data->cols : 0;
+
+    if( ICV_IS_MAT_OF_TYPE(data, CV_32FC1) )
+    {
+        if( _centers && !ICV_IS_MAT_OF_TYPE (_centers, CV_32FC1) )
+        {
+            CV_ERROR(CV_StsBadArg,"");
+        }
+        else if( !_centers )
+            CV_CALL(centers = cvCreateMat (num_of_clusters, dim, CV_32FC1));
+    }
+    else if( ICV_IS_MAT_OF_TYPE(data, CV_64FC1) )
+    {
+        if( _centers && !ICV_IS_MAT_OF_TYPE (_centers, CV_64FC1) )
+        {
+            CV_ERROR(CV_StsBadArg,"");
+        }
+        else if( !_centers )
+            CV_CALL(centers = cvCreateMat (num_of_clusters, dim, CV_64FC1));
+    }
+    else
+        CV_ERROR (CV_StsBadArg,"");
+
+    if( num_of_clusters < 1 )
+        CV_ERROR (CV_StsBadArg,"");
+
+    rng = cvRNG(seed);
+    for (i = 0; i < dim; i++)
+    {
+        CV_CALL(cvGetCol (data, &data_comp, i));
+        CV_CALL(cvMinMaxLoc (&data_comp, &minVal, &maxVal, &minLoc, &maxLoc));
+        CV_CALL(cvGetCol (centers, &centers_comp, i));
+        CV_CALL(cvRandArr (&rng, &centers_comp, CV_RAND_UNI, cvScalarAll(minVal), cvScalarAll(maxVal)));
+    }
+
+    __END__;
+
+    if( (cvGetErrStatus () < 0) || (centers != _centers) )
+        cvReleaseMat (&centers);
+
+    return _centers ? _centers : centers;
+} // end of icvGenerateRandomClusterCenters
+
+// By S. Dilman - begin -
+
+#define ICV_RAND_MAX    4294967296 // == 2^32
+
+// static void cvRandRoundUni (CvMat* center,
+//                              float radius_small,
+//                              float radius_large,
+//                              CvMat* desired_matrix,
+//                              CvRNG* rng_state_ptr)
+// {
+//     float rad, norm, coefficient;
+//     int dim, size, i, j;
+//     CvMat *cov, sample;
+//     CvRNG rng_local;
+
+//     CV_FUNCNAME("cvRandRoundUni");
+//     __BEGIN__
+
+//     rng_local = *rng_state_ptr;
+
+//     CV_ASSERT ((radius_small >= 0) &&
+//                (radius_large > 0) &&
+//                (radius_small <= radius_large));
+//     CV_ASSERT (center && desired_matrix && rng_state_ptr);
+//     CV_ASSERT (center->rows == 1);
+//     CV_ASSERT (center->cols == desired_matrix->cols);
+
+//     dim = desired_matrix->cols;
+//     size = desired_matrix->rows;
+//     cov = cvCreateMat (dim, dim, CV_32FC1);
+//     cvSetIdentity (cov);
+//     cvRandMVNormal (center, cov, desired_matrix, &rng_local);
+
+//     for (i = 0; i < size; i++)
+//     {
+//         rad = (float)(cvRandReal(&rng_local)*(radius_large - radius_small) + radius_small);
+//         cvGetRow (desired_matrix, &sample, i);
+//         norm = (float) cvNorm (&sample, 0, CV_L2);
+//         coefficient = rad / norm;
+//         for (j = 0; j < dim; j++)
+//              CV_MAT_ELEM (sample, float, 0, j) *= coefficient;
+//     }
+
+//     __END__
+
+// }
+
+// By S. Dilman - end -
+
+static int CV_CDECL
+icvCmpIntegers( const void* a, const void* b )
+{
+    return *(const int*)a - *(const int*)b;
+}
+
+
+static int CV_CDECL
+icvCmpIntegersPtr( const void* _a, const void* _b )
+{
+    int a = **(const int**)_a;
+    int b = **(const int**)_b;
+    return (a < b ? -1 : 0)|(a > b);
+}
+
+
+static int icvCmpSparseVecElems( const void* a, const void* b )
+{
+    return ((CvSparseVecElem32f*)a)->idx - ((CvSparseVecElem32f*)b)->idx;
+}
+
+
+CvMat*
+cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, bool check_for_duplicates )
+{
+    CvMat* idx = 0;
+
+    CV_FUNCNAME( "cvPreprocessIndexArray" );
+
+    __BEGIN__;
+
+    int i, idx_total, idx_selected = 0, step, type, prev = INT_MIN, is_sorted = 1;
+    uchar* srcb = 0;
+    int* srci = 0;
+    int* dsti;
+
+    if( !CV_IS_MAT(idx_arr) )
+        CV_ERROR( CV_StsBadArg, "Invalid index array" );
+
+    if( idx_arr->rows != 1 && idx_arr->cols != 1 )
+        CV_ERROR( CV_StsBadSize, "the index array must be 1-dimensional" );
+
+    idx_total = idx_arr->rows + idx_arr->cols - 1;
+    srcb = idx_arr->data.ptr;
+    srci = idx_arr->data.i;
+
+    type = CV_MAT_TYPE(idx_arr->type);
+    step = CV_IS_MAT_CONT(idx_arr->type) ? 1 : idx_arr->step/CV_ELEM_SIZE(type);
+
+    switch( type )
+    {
+    case CV_8UC1:
+    case CV_8SC1:
+        // idx_arr is array of 1's and 0's -
+        // i.e. it is a mask of the selected components
+        if( idx_total != data_arr_size )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "Component mask should contain as many elements as the total number of input variables" );
+
+        for( i = 0; i < idx_total; i++ )
+            idx_selected += srcb[i*step] != 0;
+
+        if( idx_selected == 0 )
+            CV_ERROR( CV_StsOutOfRange, "No components/input variables are selected!" );
+
+        break;
+    case CV_32SC1:
+        // idx_arr is array of integer indices of selected components
+        if( idx_total > data_arr_size )
+            CV_ERROR( CV_StsOutOfRange,
+            "index array may not contain more elements than the total number of input variables" );
+        idx_selected = idx_total;
+        // check if sorted already
+        for( i = 0; i < idx_total; i++ )
+        {
+            int val = srci[i*step];
+            if( val < prev )
+            {
+                is_sorted = 0;
+                break;
+            }
+            prev = val;
+        }
+        break;
+    default:
+        CV_ERROR( CV_StsUnsupportedFormat, "Unsupported index array data type "
+                                           "(it should be 8uC1, 8sC1 or 32sC1)" );
+    }
+
+    CV_CALL( idx = cvCreateMat( 1, idx_selected, CV_32SC1 ));
+    dsti = idx->data.i;
+
+    if( type < CV_32SC1 )
+    {
+        for( i = 0; i < idx_total; i++ )
+            if( srcb[i*step] )
+                *dsti++ = i;
+    }
+    else
+    {
+        for( i = 0; i < idx_total; i++ )
+            dsti[i] = srci[i*step];
+
+        if( !is_sorted )
+            qsort( dsti, idx_total, sizeof(dsti[0]), icvCmpIntegers );
+
+        if( dsti[0] < 0 || dsti[idx_total-1] >= data_arr_size )
+            CV_ERROR( CV_StsOutOfRange, "the index array elements are out of range" );
+
+        if( check_for_duplicates )
+        {
+            for( i = 1; i < idx_total; i++ )
+                if( dsti[i] <= dsti[i-1] )
+                    CV_ERROR( CV_StsBadArg, "There are duplicated index array elements" );
+        }
+    }
+
+    __END__;
+
+    if( cvGetErrStatus() < 0 )
+        cvReleaseMat( &idx );
+
+    return idx;
+}
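+
+/* Illustrative note (the values are made up): with data_arr_size == 5, the 8-bit
+   mask { 1, 0, 1, 1, 0 } and the 32-bit index list { 0, 2, 3 } select the same
+   three components; both forms come out as the sorted row [ 0 2 3 ], and only
+   the integer form is optionally checked for duplicates. */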
+
+
+CvMat*
+cvPreprocessVarType( const CvMat* var_type, const CvMat* var_idx,
+                     int var_count, int* response_type )
+{
+    CvMat* out_var_type = 0;
+    CV_FUNCNAME( "cvPreprocessVarType" );
+
+    if( response_type )
+        *response_type = -1;
+
+    __BEGIN__;
+
+    int i, tm_size, tm_step;
+    //int* map = 0;
+    const uchar* src;
+    uchar* dst;
+
+    if( !CV_IS_MAT(var_type) )
+        CV_ERROR( var_type ? CV_StsBadArg : CV_StsNullPtr, "Invalid or absent var_type array" );
+
+    if( var_type->rows != 1 && var_type->cols != 1 )
+        CV_ERROR( CV_StsBadSize, "var_type array must be 1-dimensional" );
+
+    if( !CV_IS_MASK_ARR(var_type))
+        CV_ERROR( CV_StsUnsupportedFormat, "type mask must be 8uC1 or 8sC1 array" );
+
+    tm_size = var_type->rows + var_type->cols - 1;
+    tm_step = var_type->rows == 1 ? 1 : var_type->step/CV_ELEM_SIZE(var_type->type);
+
+    if( /*tm_size != var_count &&*/ tm_size != var_count + 1 )
+        CV_ERROR( CV_StsBadArg,
+        "type mask must be of <input var count> + 1 size" );
+
+    if( response_type && tm_size > var_count )
+        *response_type = var_type->data.ptr[var_count*tm_step] != 0;
+
+    if( var_idx )
+    {
+        if( !CV_IS_MAT(var_idx) || CV_MAT_TYPE(var_idx->type) != CV_32SC1 ||
+            (var_idx->rows != 1 && var_idx->cols != 1) || !CV_IS_MAT_CONT(var_idx->type) )
+            CV_ERROR( CV_StsBadArg, "var index array should be continuous 1-dimensional integer vector" );
+        if( var_idx->rows + var_idx->cols - 1 > var_count )
+            CV_ERROR( CV_StsBadSize, "var index array is too large" );
+        //map = var_idx->data.i;
+        var_count = var_idx->rows + var_idx->cols - 1;
+    }
+
+    CV_CALL( out_var_type = cvCreateMat( 1, var_count, CV_8UC1 ));
+    src = var_type->data.ptr;
+    dst = out_var_type->data.ptr;
+
+    for( i = 0; i < var_count; i++ )
+    {
+        //int idx = map ? map[i] : i;
+        assert( (unsigned)/*idx*/i < (unsigned)tm_size );
+        dst[i] = (uchar)(src[/*idx*/i*tm_step] != 0);
+    }
+
+    __END__;
+
+    return out_var_type;
+}
+
+
+CvMat*
+cvPreprocessOrderedResponses( const CvMat* responses, const CvMat* sample_idx, int sample_all )
+{
+    CvMat* out_responses = 0;
+
+    CV_FUNCNAME( "cvPreprocessOrderedResponses" );
+
+    __BEGIN__;
+
+    int i, r_type, r_step;
+    const int* map = 0;
+    float* dst;
+    int sample_count = sample_all;
+
+    if( !CV_IS_MAT(responses) )
+        CV_ERROR( CV_StsBadArg, "Invalid response array" );
+
+    if( responses->rows != 1 && responses->cols != 1 )
+        CV_ERROR( CV_StsBadSize, "Response array must be 1-dimensional" );
+
+    if( responses->rows + responses->cols - 1 != sample_count )
+        CV_ERROR( CV_StsUnmatchedSizes,
+        "Response array must contain as many elements as the total number of samples" );
+
+    r_type = CV_MAT_TYPE(responses->type);
+    if( r_type != CV_32FC1 && r_type != CV_32SC1 )
+        CV_ERROR( CV_StsUnsupportedFormat, "Unsupported response type" );
+
+    r_step = responses->step ? responses->step / CV_ELEM_SIZE(responses->type) : 1;
+
+    if( r_type == CV_32FC1 && CV_IS_MAT_CONT(responses->type) && !sample_idx )
+    {
+        out_responses = cvCloneMat( responses );
+        EXIT;
+    }
+
+    if( sample_idx )
+    {
+        if( !CV_IS_MAT(sample_idx) || CV_MAT_TYPE(sample_idx->type) != CV_32SC1 ||
+            (sample_idx->rows != 1 && sample_idx->cols != 1) || !CV_IS_MAT_CONT(sample_idx->type) )
+            CV_ERROR( CV_StsBadArg, "sample index array should be continuous 1-dimensional integer vector" );
+        if( sample_idx->rows + sample_idx->cols - 1 > sample_count )
+            CV_ERROR( CV_StsBadSize, "sample index array is too large" );
+        map = sample_idx->data.i;
+        sample_count = sample_idx->rows + sample_idx->cols - 1;
+    }
+
+    CV_CALL( out_responses = cvCreateMat( 1, sample_count, CV_32FC1 ));
+
+    dst = out_responses->data.fl;
+    if( r_type == CV_32FC1 )
+    {
+        const float* src = responses->data.fl;
+        for( i = 0; i < sample_count; i++ )
+        {
+            int idx = map ? map[i] : i;
+            assert( (unsigned)idx < (unsigned)sample_all );
+            dst[i] = src[idx*r_step];
+        }
+    }
+    else
+    {
+        const int* src = responses->data.i;
+        for( i = 0; i < sample_count; i++ )
+        {
+            int idx = map ? map[i] : i;
+            assert( (unsigned)idx < (unsigned)sample_all );
+            dst[i] = (float)src[idx*r_step];
+        }
+    }
+
+    __END__;
+
+    return out_responses;
+}
+
+CvMat*
+cvPreprocessCategoricalResponses( const CvMat* responses,
+    const CvMat* sample_idx, int sample_all,
+    CvMat** out_response_map, CvMat** class_counts )
+{
+    CvMat* out_responses = 0;
+    int** response_ptr = 0;
+
+    CV_FUNCNAME( "cvPreprocessCategoricalResponses" );
+
+    if( out_response_map )
+        *out_response_map = 0;
+
+    if( class_counts )
+        *class_counts = 0;
+
+    __BEGIN__;
+
+    int i, r_type, r_step;
+    int cls_count = 1, prev_cls, prev_i;
+    const int* map = 0;
+    const int* srci;
+    const float* srcfl;
+    int* dst;
+    int* cls_map;
+    int* cls_counts = 0;
+    int sample_count = sample_all;
+
+    if( !CV_IS_MAT(responses) )
+        CV_ERROR( CV_StsBadArg, "Invalid response array" );
+
+    if( responses->rows != 1 && responses->cols != 1 )
+        CV_ERROR( CV_StsBadSize, "Response array must be 1-dimensional" );
+
+    if( responses->rows + responses->cols - 1 != sample_count )
+        CV_ERROR( CV_StsUnmatchedSizes,
+        "Response array must contain as many elements as the total number of samples" );
+
+    r_type = CV_MAT_TYPE(responses->type);
+    if( r_type != CV_32FC1 && r_type != CV_32SC1 )
+        CV_ERROR( CV_StsUnsupportedFormat, "Unsupported response type" );
+
+    r_step = responses->rows == 1 ? 1 : responses->step / CV_ELEM_SIZE(responses->type);
+
+    if( sample_idx )
+    {
+        if( !CV_IS_MAT(sample_idx) || CV_MAT_TYPE(sample_idx->type) != CV_32SC1 ||
+            (sample_idx->rows != 1 && sample_idx->cols != 1) || !CV_IS_MAT_CONT(sample_idx->type) )
+            CV_ERROR( CV_StsBadArg, "sample index array should be continuous 1-dimensional integer vector" );
+        if( sample_idx->rows + sample_idx->cols - 1 > sample_count )
+            CV_ERROR( CV_StsBadSize, "sample index array is too large" );
+        map = sample_idx->data.i;
+        sample_count = sample_idx->rows + sample_idx->cols - 1;
+    }
+
+    CV_CALL( out_responses = cvCreateMat( 1, sample_count, CV_32SC1 ));
+
+    if( !out_response_map )
+        CV_ERROR( CV_StsNullPtr, "out_response_map pointer is NULL" );
+
+    CV_CALL( response_ptr = (int**)cvAlloc( sample_count*sizeof(response_ptr[0])));
+
+    srci = responses->data.i;
+    srcfl = responses->data.fl;
+    dst = out_responses->data.i;
+
+    for( i = 0; i < sample_count; i++ )
+    {
+        int idx = map ? map[i] : i;
+        assert( (unsigned)idx < (unsigned)sample_all );
+        if( r_type == CV_32SC1 )
+            dst[i] = srci[idx*r_step];
+        else
+        {
+            float rf = srcfl[idx*r_step];
+            int ri = cvRound(rf);
+            if( ri != rf )
+            {
+                char buf[100];
+                sprintf( buf, "response #%d is not integral", idx );
+                CV_ERROR( CV_StsBadArg, buf );
+            }
+            dst[i] = ri;
+        }
+        response_ptr[i] = dst + i;
+    }
+
+    qsort( response_ptr, sample_count, sizeof(int*), icvCmpIntegersPtr );
+
+    // count the classes
+    for( i = 1; i < sample_count; i++ )
+        cls_count += *response_ptr[i] != *response_ptr[i-1];
+
+    if( cls_count < 2 )
+        CV_ERROR( CV_StsBadArg, "There is only a single class" );
+
+    CV_CALL( *out_response_map = cvCreateMat( 1, cls_count, CV_32SC1 ));
+
+    if( class_counts )
+    {
+        CV_CALL( *class_counts = cvCreateMat( 1, cls_count, CV_32SC1 ));
+        cls_counts = (*class_counts)->data.i;
+    }
+
+    // compact the class indices and build the map
+    prev_cls = ~*response_ptr[0];
+    cls_count = -1;
+    cls_map = (*out_response_map)->data.i;
+
+    for( i = 0, prev_i = -1; i < sample_count; i++ )
+    {
+        int cur_cls = *response_ptr[i];
+        if( cur_cls != prev_cls )
+        {
+            if( cls_counts && cls_count >= 0 )
+                cls_counts[cls_count] = i - prev_i;
+            cls_map[++cls_count] = prev_cls = cur_cls;
+            prev_i = i;
+        }
+        *response_ptr[i] = cls_count;
+    }
+
+    if( cls_counts )
+        cls_counts[cls_count] = i - prev_i;
+
+    __END__;
+
+    cvFree( &response_ptr );
+
+    return out_responses;
+}
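+
+/* Worked example (made-up responses): for the responses { 7, 3, 3, 9, 7 } the call
+   returns out_responses = { 1, 0, 0, 2, 1 } (compact zero-based class indices),
+   sets *out_response_map = { 3, 7, 9 } (the original labels in increasing order)
+   and, if requested, *class_counts = { 2, 2, 1 } (samples per class). */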
+
+
+const float**
+cvGetTrainSamples( const CvMat* train_data, int tflag,
+                   const CvMat* var_idx, const CvMat* sample_idx,
+                   int* _var_count, int* _sample_count,
+                   bool always_copy_data )
+{
+    float** samples = 0;
+
+    CV_FUNCNAME( "cvGetTrainSamples" );
+
+    __BEGIN__;
+
+    int i, j, var_count, sample_count, s_step, v_step;
+    bool copy_data;
+    const float* data;
+    const int *s_idx, *v_idx;
+
+    if( !CV_IS_MAT(train_data) )
+        CV_ERROR( CV_StsBadArg, "Invalid or NULL training data matrix" );
+
+    var_count = var_idx ? var_idx->cols + var_idx->rows - 1 :
+                tflag == CV_ROW_SAMPLE ? train_data->cols : train_data->rows;
+    sample_count = sample_idx ? sample_idx->cols + sample_idx->rows - 1 :
+                   tflag == CV_ROW_SAMPLE ? train_data->rows : train_data->cols;
+
+    if( _var_count )
+        *_var_count = var_count;
+
+    if( _sample_count )
+        *_sample_count = sample_count;
+
+    copy_data = tflag != CV_ROW_SAMPLE || var_idx || always_copy_data;
+
+    CV_CALL( samples = (float**)cvAlloc(sample_count*sizeof(samples[0]) +
+                (copy_data ? 1 : 0)*var_count*sample_count*sizeof(samples[0][0])) );
+    data = train_data->data.fl;
+    s_step = train_data->step / sizeof(samples[0][0]);
+    v_step = 1;
+    s_idx = sample_idx ? sample_idx->data.i : 0;
+    v_idx = var_idx ? var_idx->data.i : 0;
+
+    if( !copy_data )
+    {
+        for( i = 0; i < sample_count; i++ )
+            samples[i] = (float*)(data + (s_idx ? s_idx[i] : i)*s_step);
+    }
+    else
+    {
+        samples[0] = (float*)(samples + sample_count);
+        if( tflag != CV_ROW_SAMPLE )
+            CV_SWAP( s_step, v_step, i );
+
+        for( i = 0; i < sample_count; i++ )
+        {
+            float* dst = samples[i] = samples[0] + i*var_count;
+            const float* src = data + (s_idx ? s_idx[i] : i)*s_step;
+
+            if( !v_idx )
+                for( j = 0; j < var_count; j++ )
+                    dst[j] = src[j*v_step];
+            else
+                for( j = 0; j < var_count; j++ )
+                    dst[j] = src[v_idx[j]*v_step];
+        }
+    }
+
+    __END__;
+
+    return (const float**)samples;
+}
+
+
+void
+cvCheckTrainData( const CvMat* train_data, int tflag,
+                  const CvMat* missing_mask,
+                  int* var_all, int* sample_all )
+{
+    CV_FUNCNAME( "cvCheckTrainData" );
+
+    if( var_all )
+        *var_all = 0;
+
+    if( sample_all )
+        *sample_all = 0;
+
+    __BEGIN__;
+
+    // check parameter types and sizes
+    if( !CV_IS_MAT(train_data) || CV_MAT_TYPE(train_data->type) != CV_32FC1 )
+        CV_ERROR( CV_StsBadArg, "train data must be floating-point matrix" );
+
+    if( missing_mask )
+    {
+        if( !CV_IS_MAT(missing_mask) || !CV_IS_MASK_ARR(missing_mask) ||
+            !CV_ARE_SIZES_EQ(train_data, missing_mask) )
+            CV_ERROR( CV_StsBadArg,
+            "missing value mask must be 8-bit matrix of the same size as training data" );
+    }
+
+    if( tflag != CV_ROW_SAMPLE && tflag != CV_COL_SAMPLE )
+        CV_ERROR( CV_StsBadArg,
+        "Unknown training data layout (must be CV_ROW_SAMPLE or CV_COL_SAMPLE)" );
+
+    if( var_all )
+        *var_all = tflag == CV_ROW_SAMPLE ? train_data->cols : train_data->rows;
+
+    if( sample_all )
+        *sample_all = tflag == CV_ROW_SAMPLE ? train_data->rows : train_data->cols;
+
+    __END__;
+}
+
+
+int
+cvPrepareTrainData( const char* /*funcname*/,
+                    const CvMat* train_data, int tflag,
+                    const CvMat* responses, int response_type,
+                    const CvMat* var_idx,
+                    const CvMat* sample_idx,
+                    bool always_copy_data,
+                    const float*** out_train_samples,
+                    int* _sample_count,
+                    int* _var_count,
+                    int* _var_all,
+                    CvMat** out_responses,
+                    CvMat** out_response_map,
+                    CvMat** out_var_idx,
+                    CvMat** out_sample_idx )
+{
+    int ok = 0;
+    CvMat* _var_idx = 0;
+    CvMat* _sample_idx = 0;
+    CvMat* _responses = 0;
+    int sample_all = 0, sample_count = 0, var_all = 0, var_count = 0;
+
+    CV_FUNCNAME( "cvPrepareTrainData" );
+
+    // step 0. clear all the output pointers to ensure we do not try
+    // to call free() with uninitialized pointers
+    if( out_responses )
+        *out_responses = 0;
+
+    if( out_response_map )
+        *out_response_map = 0;
+
+    if( out_var_idx )
+        *out_var_idx = 0;
+
+    if( out_sample_idx )
+        *out_sample_idx = 0;
+
+    if( out_train_samples )
+        *out_train_samples = 0;
+
+    if( _sample_count )
+        *_sample_count = 0;
+
+    if( _var_count )
+        *_var_count = 0;
+
+    if( _var_all )
+        *_var_all = 0;
+
+    __BEGIN__;
+
+    if( !out_train_samples )
+        CV_ERROR( CV_StsBadArg, "output pointer to train samples is NULL" );
+
+    CV_CALL( cvCheckTrainData( train_data, tflag, 0, &var_all, &sample_all ));
+
+    if( sample_idx )
+        CV_CALL( _sample_idx = cvPreprocessIndexArray( sample_idx, sample_all ));
+    if( var_idx )
+        CV_CALL( _var_idx = cvPreprocessIndexArray( var_idx, var_all ));
+
+    if( responses )
+    {
+        if( !out_responses )
+            CV_ERROR( CV_StsNullPtr, "output response pointer is NULL" );
+
+        if( response_type == CV_VAR_NUMERICAL )
+        {
+            CV_CALL( _responses = cvPreprocessOrderedResponses( responses,
+                                                _sample_idx, sample_all ));
+        }
+        else
+        {
+            CV_CALL( _responses = cvPreprocessCategoricalResponses( responses,
+                                _sample_idx, sample_all, out_response_map, 0 ));
+        }
+    }
+
+    CV_CALL( *out_train_samples =
+                cvGetTrainSamples( train_data, tflag, _var_idx, _sample_idx,
+                                   &var_count, &sample_count, always_copy_data ));
+
+    ok = 1;
+
+    __END__;
+
+    if( ok )
+    {
+        if( out_responses )
+            *out_responses = _responses, _responses = 0;
+
+        if( out_var_idx )
+            *out_var_idx = _var_idx, _var_idx = 0;
+
+        if( out_sample_idx )
+            *out_sample_idx = _sample_idx, _sample_idx = 0;
+
+        if( _sample_count )
+            *_sample_count = sample_count;
+
+        if( _var_count )
+            *_var_count = var_count;
+
+        if( _var_all )
+            *_var_all = var_all;
+    }
+    else
+    {
+        if( out_response_map )
+            cvReleaseMat( out_response_map );
+        cvFree( out_train_samples );
+    }
+
+    if( _responses != responses )
+        cvReleaseMat( &_responses );
+    cvReleaseMat( &_var_idx );
+    cvReleaseMat( &_sample_idx );
+
+    return ok;
+}
+
+
+typedef struct CvSampleResponsePair
+{
+    const float* sample;
+    const uchar* mask;
+    int response;
+    int index;
+}
+CvSampleResponsePair;
+
+
+static int
+CV_CDECL icvCmpSampleResponsePairs( const void* a, const void* b )
+{
+    int ra = ((const CvSampleResponsePair*)a)->response;
+    int rb = ((const CvSampleResponsePair*)b)->response;
+    int ia = ((const CvSampleResponsePair*)a)->index;
+    int ib = ((const CvSampleResponsePair*)b)->index;
+
+    return ra < rb ? -1 : ra > rb ? 1 : ia - ib;
+    //return (ra > rb ? -1 : 0)|(ra < rb);
+}
+
+
+void
+cvSortSamplesByClasses( const float** samples, const CvMat* classes,
+                        int* class_ranges, const uchar** mask )
+{
+    CvSampleResponsePair* pairs = 0;
+    CV_FUNCNAME( "cvSortSamplesByClasses" );
+
+    __BEGIN__;
+
+    int i, k = 0, sample_count;
+
+    if( !samples || !classes || !class_ranges )
+        CV_ERROR( CV_StsNullPtr, "INTERNAL ERROR: some of the args are NULL pointers" );
+
+    if( classes->rows != 1 || CV_MAT_TYPE(classes->type) != CV_32SC1 )
+        CV_ERROR( CV_StsBadArg, "classes array must be a single row of integers" );
+
+    sample_count = classes->cols;
+    CV_CALL( pairs = (CvSampleResponsePair*)cvAlloc( (sample_count+1)*sizeof(pairs[0])));
+
+    for( i = 0; i < sample_count; i++ )
+    {
+        pairs[i].sample = samples[i];
+        pairs[i].mask = (mask) ? (mask[i]) : 0;
+        pairs[i].response = classes->data.i[i];
+        pairs[i].index = i;
+        assert( classes->data.i[i] >= 0 );
+    }
+
+    qsort( pairs, sample_count, sizeof(pairs[0]), icvCmpSampleResponsePairs );
+    pairs[sample_count].response = -1;
+    class_ranges[0] = 0;
+
+    for( i = 0; i < sample_count; i++ )
+    {
+        samples[i] = pairs[i].sample;
+        if (mask)
+            mask[i] = pairs[i].mask;
+        classes->data.i[i] = pairs[i].response;
+
+        if( pairs[i].response != pairs[i+1].response )
+            class_ranges[++k] = i+1;
+    }
+
+    __END__;
+
+    cvFree( &pairs );
+}
+
+
+void
+cvPreparePredictData( const CvArr* _sample, int dims_all,
+                      const CvMat* comp_idx, int class_count,
+                      const CvMat* prob, float** _row_sample,
+                      int as_sparse )
+{
+    float* row_sample = 0;
+    int* inverse_comp_idx = 0;
+
+    CV_FUNCNAME( "cvPreparePredictData" );
+
+    __BEGIN__;
+
+    const CvMat* sample = (const CvMat*)_sample;
+    float* sample_data;
+    int sample_step;
+    int is_sparse = CV_IS_SPARSE_MAT(sample);
+    int d, sizes[CV_MAX_DIM];
+    int i, dims_selected;
+    int vec_size;
+
+    if( !is_sparse && !CV_IS_MAT(sample) )
+        CV_ERROR( !sample ? CV_StsNullPtr : CV_StsBadArg, "The sample is not a valid vector" );
+
+    if( cvGetElemType( sample ) != CV_32FC1 )
+        CV_ERROR( CV_StsUnsupportedFormat, "Input sample must have 32fC1 type" );
+
+    CV_CALL( d = cvGetDims( sample, sizes ));
+
+    if( !((is_sparse && d == 1) || (!is_sparse && d == 2 && (sample->rows == 1 || sample->cols == 1))) )
+        CV_ERROR( CV_StsBadSize, "Input sample must be 1-dimensional vector" );
+
+    if( d == 1 )
+        sizes[1] = 1;
+
+    if( sizes[0] + sizes[1] - 1 != dims_all )
+        CV_ERROR( CV_StsUnmatchedSizes,
+        "The sample size is different from what has been used for training" );
+
+    if( !_row_sample )
+        CV_ERROR( CV_StsNullPtr, "INTERNAL ERROR: The row_sample pointer is NULL" );
+
+    if( comp_idx && (!CV_IS_MAT(comp_idx) || comp_idx->rows != 1 ||
+        CV_MAT_TYPE(comp_idx->type) != CV_32SC1) )
+        CV_ERROR( CV_StsBadArg, "INTERNAL ERROR: invalid comp_idx" );
+
+    dims_selected = comp_idx ? comp_idx->cols : dims_all;
+
+    if( prob )
+    {
+        if( !CV_IS_MAT(prob) )
+            CV_ERROR( CV_StsBadArg, "The output matrix of probabilities is invalid" );
+
+        if( (prob->rows != 1 && prob->cols != 1) ||
+            (CV_MAT_TYPE(prob->type) != CV_32FC1 &&
+            CV_MAT_TYPE(prob->type) != CV_64FC1) )
+            CV_ERROR( CV_StsBadSize,
+            "The matrix of probabilities must be 1-dimensional vector of 32fC1 type" );
+
+        if( prob->rows + prob->cols - 1 != class_count )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "The vector of probabilities must contain as many elements as "
+            "the number of classes in the training set" );
+    }
+
+    vec_size = !as_sparse ? dims_selected*sizeof(row_sample[0]) :
+                (dims_selected + 1)*sizeof(CvSparseVecElem32f);
+
+    if( CV_IS_MAT(sample) )
+    {
+        sample_data = sample->data.fl;
+        sample_step = CV_IS_MAT_CONT(sample->type) ? 1 : sample->step/sizeof(row_sample[0]);
+
+        if( !comp_idx && CV_IS_MAT_CONT(sample->type) && !as_sparse )
+            *_row_sample = sample_data;
+        else
+        {
+            CV_CALL( row_sample = (float*)cvAlloc( vec_size ));
+
+            if( !comp_idx )
+                for( i = 0; i < dims_selected; i++ )
+                    row_sample[i] = sample_data[sample_step*i];
+            else
+            {
+                int* comp = comp_idx->data.i;
+                for( i = 0; i < dims_selected; i++ )
+                    row_sample[i] = sample_data[sample_step*comp[i]];
+            }
+
+            *_row_sample = row_sample;
+        }
+
+        if( as_sparse )
+        {
+            const float* src = (const float*)row_sample;
+            CvSparseVecElem32f* dst = (CvSparseVecElem32f*)row_sample;
+
+            dst[dims_selected].idx = -1;
+            for( i = dims_selected - 1; i >= 0; i-- )
+            {
+                dst[i].idx = i;
+                dst[i].val = src[i];
+            }
+        }
+    }
+    else
+    {
+        CvSparseNode* node;
+        CvSparseMatIterator mat_iterator;
+        const CvSparseMat* sparse = (const CvSparseMat*)sample;
+        assert( is_sparse );
+
+        node = cvInitSparseMatIterator( sparse, &mat_iterator );
+        CV_CALL( row_sample = (float*)cvAlloc( vec_size ));
+
+        if( comp_idx )
+        {
+            CV_CALL( inverse_comp_idx = (int*)cvAlloc( dims_all*sizeof(int) ));
+            memset( inverse_comp_idx, -1, dims_all*sizeof(int) );
+            for( i = 0; i < dims_selected; i++ )
+                inverse_comp_idx[comp_idx->data.i[i]] = i;
+        }
+
+        if( !as_sparse )
+        {
+            memset( row_sample, 0, vec_size );
+
+            for( ; node != 0; node = cvGetNextSparseNode(&mat_iterator) )
+            {
+                int idx = *CV_NODE_IDX( sparse, node );
+                if( inverse_comp_idx )
+                {
+                    idx = inverse_comp_idx[idx];
+                    if( idx < 0 )
+                        continue;
+                }
+                row_sample[idx] = *(float*)CV_NODE_VAL( sparse, node );
+            }
+        }
+        else
+        {
+            CvSparseVecElem32f* ptr = (CvSparseVecElem32f*)row_sample;
+
+            for( ; node != 0; node = cvGetNextSparseNode(&mat_iterator) )
+            {
+                int idx = *CV_NODE_IDX( sparse, node );
+                if( inverse_comp_idx )
+                {
+                    idx = inverse_comp_idx[idx];
+                    if( idx < 0 )
+                        continue;
+                }
+                ptr->idx = idx;
+                ptr->val = *(float*)CV_NODE_VAL( sparse, node );
+                ptr++;
+            }
+
+            qsort( row_sample, ptr - (CvSparseVecElem32f*)row_sample,
+                   sizeof(ptr[0]), icvCmpSparseVecElems );
+            ptr->idx = -1;
+        }
+
+        *_row_sample = row_sample;
+    }
+
+    __END__;
+
+    if( inverse_comp_idx )
+        cvFree( &inverse_comp_idx );
+
+    if( cvGetErrStatus() < 0 && _row_sample )
+    {
+        cvFree( &row_sample );
+        *_row_sample = 0;
+    }
+}
+
+
+static void
+icvConvertDataToSparse( const uchar* src, int src_step, int src_type,
+                        uchar* dst, int dst_step, int dst_type,
+                        CvSize size, int* idx )
+{
+    CV_FUNCNAME( "icvConvertDataToSparse" );
+
+    __BEGIN__;
+
+    int i, j;
+    src_type = CV_MAT_TYPE(src_type);
+    dst_type = CV_MAT_TYPE(dst_type);
+
+    if( CV_MAT_CN(src_type) != 1 || CV_MAT_CN(dst_type) != 1 )
+        CV_ERROR( CV_StsUnsupportedFormat, "The function supports only single-channel arrays" );
+
+    if( src_step == 0 )
+        src_step = CV_ELEM_SIZE(src_type);
+
+    if( dst_step == 0 )
+        dst_step = CV_ELEM_SIZE(dst_type);
+
+    // if there is no "idx" and if both arrays are continuous,
+    // do the whole processing (copying or conversion) in a single loop
+    if( !idx && CV_ELEM_SIZE(src_type)*size.width == src_step &&
+        CV_ELEM_SIZE(dst_type)*size.width == dst_step )
+    {
+        size.width *= size.height;
+        size.height = 1;
+    }
+
+    if( src_type == dst_type )
+    {
+        int full_width = CV_ELEM_SIZE(dst_type)*size.width;
+
+        if( full_width == sizeof(int) ) // another common case: copy int's or float's
+            for( i = 0; i < size.height; i++, src += src_step )
+                *(int*)(dst + dst_step*(idx ? idx[i] : i)) = *(int*)src;
+        else
+            for( i = 0; i < size.height; i++, src += src_step )
+                memcpy( dst + dst_step*(idx ? idx[i] : i), src, full_width );
+    }
+    else if( src_type == CV_32SC1 && (dst_type == CV_32FC1 || dst_type == CV_64FC1) )
+        for( i = 0; i < size.height; i++, src += src_step )
+        {
+            uchar* _dst = dst + dst_step*(idx ? idx[i] : i);
+            if( dst_type == CV_32FC1 )
+                for( j = 0; j < size.width; j++ )
+                    ((float*)_dst)[j] = (float)((int*)src)[j];
+            else
+                for( j = 0; j < size.width; j++ )
+                    ((double*)_dst)[j] = ((int*)src)[j];
+        }
+    else if( (src_type == CV_32FC1 || src_type == CV_64FC1) && dst_type == CV_32SC1 )
+        for( i = 0; i < size.height; i++, src += src_step )
+        {
+            uchar* _dst = dst + dst_step*(idx ? idx[i] : i);
+            if( src_type == CV_32FC1 )
+                for( j = 0; j < size.width; j++ )
+                    ((int*)_dst)[j] = cvRound(((float*)src)[j]);
+            else
+                for( j = 0; j < size.width; j++ )
+                    ((int*)_dst)[j] = cvRound(((double*)src)[j]);
+        }
+    else if( (src_type == CV_32FC1 && dst_type == CV_64FC1) ||
+             (src_type == CV_64FC1 && dst_type == CV_32FC1) )
+        for( i = 0; i < size.height; i++, src += src_step )
+        {
+            uchar* _dst = dst + dst_step*(idx ? idx[i] : i);
+            if( src_type == CV_32FC1 )
+                for( j = 0; j < size.width; j++ )
+                    ((double*)_dst)[j] = ((float*)src)[j];
+            else
+                for( j = 0; j < size.width; j++ )
+                    ((float*)_dst)[j] = (float)((double*)src)[j];
+        }
+    else
+        CV_ERROR( CV_StsUnsupportedFormat, "Unsupported combination of input and output vectors" );
+
+    __END__;
+}
+
+
+void
+cvWritebackLabels( const CvMat* labels, CvMat* dst_labels,
+                   const CvMat* centers, CvMat* dst_centers,
+                   const CvMat* probs, CvMat* dst_probs,
+                   const CvMat* sample_idx, int samples_all,
+                   const CvMat* comp_idx, int dims_all )
+{
+    CV_FUNCNAME( "cvWritebackLabels" );
+
+    __BEGIN__;
+
+    int samples_selected = samples_all, dims_selected = dims_all;
+
+    if( dst_labels && !CV_IS_MAT(dst_labels) )
+        CV_ERROR( CV_StsBadArg, "Array of output labels is not a valid matrix" );
+
+    if( dst_centers )
+        if( !ICV_IS_MAT_OF_TYPE(dst_centers, CV_32FC1) &&
+            !ICV_IS_MAT_OF_TYPE(dst_centers, CV_64FC1) )
+            CV_ERROR( CV_StsBadArg, "Array of cluster centers is not a valid matrix" );
+
+    if( dst_probs && !CV_IS_MAT(dst_probs) )
+        CV_ERROR( CV_StsBadArg, "Probability matrix is not valid" );
+
+    if( sample_idx )
+    {
+        CV_ASSERT( sample_idx->rows == 1 && CV_MAT_TYPE(sample_idx->type) == CV_32SC1 );
+        samples_selected = sample_idx->cols;
+    }
+
+    if( comp_idx )
+    {
+        CV_ASSERT( comp_idx->rows == 1 && CV_MAT_TYPE(comp_idx->type) == CV_32SC1 );
+        dims_selected = comp_idx->cols;
+    }
+
+    if( dst_labels && (!labels || labels->data.ptr != dst_labels->data.ptr) )
+    {
+        if( !labels )
+            CV_ERROR( CV_StsNullPtr, "NULL labels" );
+
+        CV_ASSERT( labels->rows == 1 );
+
+        if( dst_labels->rows != 1 && dst_labels->cols != 1 )
+            CV_ERROR( CV_StsBadSize, "Array of output labels should be 1d vector" );
+
+        if( dst_labels->rows + dst_labels->cols - 1 != samples_all )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "Size of vector of output labels is not equal to the total number of input samples" );
+
+        CV_ASSERT( labels->cols == samples_selected );
+
+        CV_CALL( icvConvertDataToSparse( labels->data.ptr, labels->step, labels->type,
+                        dst_labels->data.ptr, dst_labels->step, dst_labels->type,
+                        cvSize( 1, samples_selected ), sample_idx ? sample_idx->data.i : 0 ));
+    }
+
+    if( dst_centers && (!centers || centers->data.ptr != dst_centers->data.ptr) )
+    {
+        int i;
+
+        if( !centers )
+            CV_ERROR( CV_StsNullPtr, "NULL centers" );
+
+        if( centers->rows != dst_centers->rows )
+            CV_ERROR( CV_StsUnmatchedSizes, "Invalid number of rows in matrix of output centers" );
+
+        if( dst_centers->cols != dims_all )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "Number of columns in matrix of output centers is "
+            "not equal to the total number of components in the input samples" );
+
+        CV_ASSERT( centers->cols == dims_selected );
+
+        for( i = 0; i < centers->rows; i++ )
+            CV_CALL( icvConvertDataToSparse( centers->data.ptr + i*centers->step, 0, centers->type,
+                        dst_centers->data.ptr + i*dst_centers->step, 0, dst_centers->type,
+                        cvSize( 1, dims_selected ), comp_idx ? comp_idx->data.i : 0 ));
+    }
+
+    if( dst_probs && (!probs || probs->data.ptr != dst_probs->data.ptr) )
+    {
+        if( !probs )
+            CV_ERROR( CV_StsNullPtr, "NULL probs" );
+
+        if( probs->cols != dst_probs->cols )
+            CV_ERROR( CV_StsUnmatchedSizes, "Invalid number of columns in output probability matrix" );
+
+        if( dst_probs->rows != samples_all )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "Number of rows in output probability matrix is "
+            "not equal to the total number of input samples" );
+
+        CV_ASSERT( probs->rows == samples_selected );
+
+        CV_CALL( icvConvertDataToSparse( probs->data.ptr, probs->step, probs->type,
+                        dst_probs->data.ptr, dst_probs->step, dst_probs->type,
+                        cvSize( probs->cols, samples_selected ),
+                        sample_idx ? sample_idx->data.i : 0 ));
+    }
+
+    __END__;
+}
+
+#if 0
+CV_IMPL void
+cvStatModelMultiPredict( const CvStatModel* stat_model,
+                         const CvArr* predict_input,
+                         int flags, CvMat* predict_output,
+                         CvMat* probs, const CvMat* sample_idx )
+{
+    CvMemStorage* storage = 0;
+    CvMat* sample_idx_buffer = 0;
+    CvSparseMat** sparse_rows = 0;
+    int samples_selected = 0;
+
+    CV_FUNCNAME( "cvStatModelMultiPredict" );
+
+    __BEGIN__;
+
+    int i;
+    int predict_output_step = 1, sample_idx_step = 1;
+    int type;
+    int d, sizes[CV_MAX_DIM];
+    int tflag = flags == CV_COL_SAMPLE;
+    int samples_all, dims_all;
+    int is_sparse = CV_IS_SPARSE_MAT(predict_input);
+    CvMat predict_input_part;
+    CvArr* sample = &predict_input_part;
+    CvMat probs_part;
+    CvMat* probs1 = probs ? &probs_part : 0;
+
+    if( !CV_IS_STAT_MODEL(stat_model) )
+        CV_ERROR( !stat_model ? CV_StsNullPtr : CV_StsBadArg, "Invalid statistical model" );
+
+    if( !stat_model->predict )
+        CV_ERROR( CV_StsNotImplemented, "There is no \"predict\" method" );
+
+    if( !predict_input || !predict_output )
+        CV_ERROR( CV_StsNullPtr, "NULL input or output matrices" );
+
+    if( !is_sparse && !CV_IS_MAT(predict_input) )
+        CV_ERROR( CV_StsBadArg, "predict_input should be a matrix or a sparse matrix" );
+
+    if( !CV_IS_MAT(predict_output) )
+        CV_ERROR( CV_StsBadArg, "predict_output should be a matrix" );
+
+    type = cvGetElemType( predict_input );
+    if( type != CV_32FC1 ||
+        (CV_MAT_TYPE(predict_output->type) != CV_32FC1 &&
+         CV_MAT_TYPE(predict_output->type) != CV_32SC1 ))
+         CV_ERROR( CV_StsUnsupportedFormat, "The input or output matrix has unsupported format" );
+
+    CV_CALL( d = cvGetDims( predict_input, sizes ));
+    if( d > 2 )
+        CV_ERROR( CV_StsBadSize, "The input matrix should be 1- or 2-dimensional" );
+
+    if( !tflag )
+    {
+        samples_all = samples_selected = sizes[0];
+        dims_all = sizes[1];
+    }
+    else
+    {
+        samples_all = samples_selected = sizes[1];
+        dims_all = sizes[0];
+    }
+
+    if( sample_idx )
+    {
+        if( !CV_IS_MAT(sample_idx) )
+            CV_ERROR( CV_StsBadArg, "Invalid sample_idx matrix" );
+
+        if( sample_idx->cols != 1 && sample_idx->rows != 1 )
+            CV_ERROR( CV_StsBadSize, "sample_idx must be 1-dimensional matrix" );
+
+        samples_selected = sample_idx->rows + sample_idx->cols - 1;
+
+        if( CV_MAT_TYPE(sample_idx->type) == CV_32SC1 )
+        {
+            if( samples_selected > samples_all )
+                CV_ERROR( CV_StsBadSize, "sample_idx is too large vector" );
+        }
+        else if( samples_selected != samples_all )
+            CV_ERROR( CV_StsUnmatchedSizes, "sample_idx has incorrect size" );
+
+        sample_idx_step = sample_idx->step ?
+            sample_idx->step / CV_ELEM_SIZE(sample_idx->type) : 1;
+    }
+
+    if( predict_output->rows != 1 && predict_output->cols != 1 )
+        CV_ERROR( CV_StsBadSize, "predict_output should be a 1-dimensional matrix" );
+
+    if( predict_output->rows + predict_output->cols - 1 != samples_all )
+        CV_ERROR( CV_StsUnmatchedSizes, "predict_output and predict_input have uncoordinated sizes" );
+
+    predict_output_step = predict_output->step ?
+        predict_output->step / CV_ELEM_SIZE(predict_output->type) : 1;
+
+    if( probs )
+    {
+        if( !CV_IS_MAT(probs) )
+            CV_ERROR( CV_StsBadArg, "Invalid matrix of probabilities" );
+
+        if( probs->rows != samples_all )
+            CV_ERROR( CV_StsUnmatchedSizes,
+            "matrix of probabilities must have as many rows as the total number of samples" );
+
+        if( CV_MAT_TYPE(probs->type) != CV_32FC1 )
+            CV_ERROR( CV_StsUnsupportedFormat, "matrix of probabilities must have 32fC1 type" );
+    }
+
+    if( is_sparse )
+    {
+        CvSparseNode* node;
+        CvSparseMatIterator mat_iterator;
+        CvSparseMat* sparse = (CvSparseMat*)predict_input;
+
+        if( sample_idx && CV_MAT_TYPE(sample_idx->type) == CV_32SC1 )
+        {
+            CV_CALL( sample_idx_buffer = cvCreateMat( 1, samples_all, CV_8UC1 ));
+            cvZero( sample_idx_buffer );
+            for( i = 0; i < samples_selected; i++ )
+                sample_idx_buffer->data.ptr[sample_idx->data.i[i*sample_idx_step]] = 1;
+            samples_selected = samples_all;
+            sample_idx = sample_idx_buffer;
+            sample_idx_step = 1;
+        }
+
+        CV_CALL( sparse_rows = (CvSparseMat**)cvAlloc( samples_selected*sizeof(sparse_rows[0])));
+        for( i = 0; i < samples_selected; i++ )
+        {
+            if( sample_idx && sample_idx->data.ptr[i*sample_idx_step] == 0 )
+                continue;
+            CV_CALL( sparse_rows[i] = cvCreateSparseMat( 1, &dims_all, type ));
+            if( !storage )
+                storage = sparse_rows[i]->heap->storage;
+            else
+            {
+                // hack: to decrease memory footprint, make all the sparse matrices
+                // reside in the same storage
+                int elem_size = sparse_rows[i]->heap->elem_size;
+                cvReleaseMemStorage( &sparse_rows[i]->heap->storage );
+                sparse_rows[i]->heap = cvCreateSet( 0, sizeof(CvSet), elem_size, storage );
+            }
+        }
+
+        // put each row (or column) of predict_input into separate sparse matrix.
+        node = cvInitSparseMatIterator( sparse, &mat_iterator );
+        for( ; node != 0; node = cvGetNextSparseNode( &mat_iterator ))
+        {
+            int* idx = CV_NODE_IDX( sparse, node );
+            int idx0 = idx[tflag ^ 1];
+            int idx1 = idx[tflag];
+
+            if( sample_idx && sample_idx->data.ptr[idx0*sample_idx_step] == 0 )
+                continue;
+
+            assert( sparse_rows[idx0] != 0 );
+            *(float*)cvPtrND( sparse, &idx1, 0, 1, 0 ) = *(float*)CV_NODE_VAL( sparse, node );
+        }
+    }
+
+    for( i = 0; i < samples_selected; i++ )
+    {
+        int idx = i;
+        float response;
+
+        if( sample_idx )
+        {
+            if( CV_MAT_TYPE(sample_idx->type) == CV_32SC1 )
+            {
+                idx = sample_idx->data.i[i*sample_idx_step];
+                if( (unsigned)idx >= (unsigned)samples_all )
+                    CV_ERROR( CV_StsOutOfRange, "Some of sample_idx elements are out of range" );
+            }
+            else if( CV_MAT_TYPE(sample_idx->type) == CV_8UC1 &&
+                     sample_idx->data.ptr[i*sample_idx_step] == 0 )
+                continue;
+        }
+
+        if( !is_sparse )
+        {
+            if( !tflag )
+                cvGetRow( predict_input, &predict_input_part, idx );
+            else
+            {
+                cvGetCol( predict_input, &predict_input_part, idx );
+            }
+        }
+        else
+            sample = sparse_rows[idx];
+
+        if( probs )
+            cvGetRow( probs, probs1, idx );
+
+        CV_CALL( response = stat_model->predict( stat_model, (CvMat*)sample, probs1 ));
+
+        if( CV_MAT_TYPE(predict_output->type) == CV_32FC1 )
+            predict_output->data.fl[idx*predict_output_step] = response;
+        else
+        {
+            CV_ASSERT( cvRound(response) == response );
+            predict_output->data.i[idx*predict_output_step] = cvRound(response);
+        }
+    }
+
+    __END__;
+
+    if( sparse_rows )
+    {
+        int i;
+        for( i = 0; i < samples_selected; i++ )
+            if( sparse_rows[i] )
+            {
+                sparse_rows[i]->heap->storage = 0;
+                cvReleaseSparseMat( &sparse_rows[i] );
+            }
+        cvFree( &sparse_rows );
+    }
+
+    cvReleaseMat( &sample_idx_buffer );
+    cvReleaseMemStorage( &storage );
+}
+#endif
+
+// By P. Yarykin - begin -
+
+void cvCombineResponseMaps (CvMat*  _responses,
+                      const CvMat*  old_response_map,
+                            CvMat*  new_response_map,
+                            CvMat** out_response_map)
+{
+    int** old_data = NULL;
+    int** new_data = NULL;
+
+        CV_FUNCNAME ("cvCombineResponseMaps");
+        __BEGIN__
+
+    int i,j;
+    int old_n, new_n, out_n;
+    int samples, free_response;
+    int* first;
+    int* responses;
+    int* out_data;
+
+    if( out_response_map )
+        *out_response_map = 0;
+
+// Check input data.
+    if ((!ICV_IS_MAT_OF_TYPE (_responses, CV_32SC1)) ||
+        (!ICV_IS_MAT_OF_TYPE (old_response_map, CV_32SC1)) ||
+        (!ICV_IS_MAT_OF_TYPE (new_response_map, CV_32SC1)))
+    {
+        CV_ERROR (CV_StsBadArg, "Some of input arguments is not the CvMat")
+    }
+
+// Prepare sorted responses.
+    first = new_response_map->data.i;
+    new_n = new_response_map->cols;
+    CV_CALL (new_data = (int**)cvAlloc (new_n * sizeof (new_data[0])));
+    for (i = 0; i < new_n; i++)
+        new_data[i] = first + i;
+    qsort (new_data, new_n, sizeof(int*), icvCmpIntegersPtr);
+
+    first = old_response_map->data.i;
+    old_n = old_response_map->cols;
+    CV_CALL (old_data = (int**)cvAlloc (old_n * sizeof (old_data[0])));
+    for (i = 0; i < old_n; i++)
+        old_data[i] = first + i;
+    qsort (old_data, old_n, sizeof(int*), icvCmpIntegersPtr);
+
+// Count the number of different responses.
+    for (i = 0, j = 0, out_n = 0; i < old_n && j < new_n; out_n++)
+    {
+        if (*old_data[i] == *new_data[j])
+        {
+            i++;
+            j++;
+        }
+        else if (*old_data[i] < *new_data[j])
+            i++;
+        else
+            j++;
+    }
+    out_n += old_n - i + new_n - j;
+
+// Create and fill the result response maps.
+    CV_CALL (*out_response_map = cvCreateMat (1, out_n, CV_32SC1));
+    out_data = (*out_response_map)->data.i;
+    memcpy (out_data, first, old_n * sizeof (int));
+
+    free_response = old_n;
+    for (i = 0, j = 0; i < old_n && j < new_n; )
+    {
+        if (*old_data[i] == *new_data[j])
+        {
+            *new_data[j] = (int)(old_data[i] - first);
+            i++;
+            j++;
+        }
+        else if (*old_data[i] < *new_data[j])
+            i++;
+        else
+        {
+            out_data[free_response] = *new_data[j];
+            *new_data[j] = free_response++;
+            j++;
+        }
+    }
+    for (; j < new_n; j++)
+    {
+        out_data[free_response] = *new_data[j];
+        *new_data[j] = free_response++;
+    }
+    CV_ASSERT (free_response == out_n);
+
+// Change <responses> according to out response map.
+    samples = _responses->cols + _responses->rows - 1;
+    responses = _responses->data.i;
+    first = new_response_map->data.i;
+    for (i = 0; i < samples; i++)
+    {
+        responses[i] = first[responses[i]];
+    }
+
+    __END__;
+
+    cvFree(&old_data);
+    cvFree(&new_data);
+
+}
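+
+// Usage sketch (illustrative only, not part of the original sources): merging the
+// response map of a previously seen data batch with that of a new batch. All three
+// maps and `new_responses` are assumed to be 1 x N CV_32SC1 row vectors prepared by
+// the caller:
+//
+//     CvMat* out_map = 0;
+//     cvCombineResponseMaps( new_responses, old_map, new_map, &out_map );
+//     // new_map now holds indices into out_map, and new_responses are re-indexed in place
+//     cvReleaseMat( &out_map );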
+
+
+static int icvGetNumberOfCluster( double* prob_vector, int num_of_clusters, float r,
+                           float outlier_thresh, int normalize_probs )
+{
+    int max_prob_loc = 0;
+
+    CV_FUNCNAME("icvGetNumberOfCluster");
+    __BEGIN__;
+
+    double prob, maxprob, sum;
+    int i;
+
+    CV_ASSERT(prob_vector);
+    CV_ASSERT(num_of_clusters >= 0);
+
+    maxprob = prob_vector[0];
+    max_prob_loc = 0;
+    sum = maxprob;
+    for( i = 1; i < num_of_clusters; i++ )
+    {
+        prob = prob_vector[i];
+        sum += prob;
+        if( prob > maxprob )
+        {
+            max_prob_loc = i;
+            maxprob = prob;
+        }
+    }
+    if( normalize_probs && fabs(sum - 1.) > FLT_EPSILON )
+    {
+        for( i = 0; i < num_of_clusters; i++ )
+            prob_vector[i] /= sum;
+    }
+    if( fabs(r - 1.) > FLT_EPSILON && fabs(sum - 1.) < outlier_thresh )
+        max_prob_loc = -1;
+
+    __END__;
+
+    return max_prob_loc;
+
+} // End of icvGetNumberOfCluster
+
+
+void icvFindClusterLabels( const CvMat* probs, float outlier_thresh, float r,
+                          const CvMat* labels )
+{
+    CvMat* counts = 0;
+
+    CV_FUNCNAME("icvFindClusterLabels");
+    __BEGIN__;
+
+    int nclusters, nsamples;
+    int i, j;
+    double* probs_data;
+
+    CV_ASSERT( ICV_IS_MAT_OF_TYPE(probs, CV_64FC1) );
+    CV_ASSERT( ICV_IS_MAT_OF_TYPE(labels, CV_32SC1) );
+
+    nclusters = probs->cols;
+    nsamples  = probs->rows;
+    CV_ASSERT( nsamples == labels->cols );
+
+    CV_CALL( counts = cvCreateMat( 1, nclusters + 1, CV_32SC1 ) );
+    CV_CALL( cvSetZero( counts ));
+    for( i = 0; i < nsamples; i++ )
+    {
+        labels->data.i[i] = icvGetNumberOfCluster( probs->data.db + i*probs->cols,
+            nclusters, r, outlier_thresh, 1 );
+        counts->data.i[labels->data.i[i] + 1]++;
+    }
+    CV_ASSERT((int)cvSum(counts).val[0] == nsamples);
+    // Fill empty clusters with the vector that has the maximal probability
+    for( j = 0; j < nclusters; j++ ) // outliers are ignored
+    {
+        int maxprob_loc = -1;
+        double maxprob = 0;
+
+        if( counts->data.i[j+1] ) // j-th class is not empty
+            continue;
+        // look for a representative that is not alone in its cluster
+        // and that has the maximal probability among all such vectors
+        probs_data = probs->data.db;
+        for( i = 0; i < nsamples; i++, probs_data++ )
+        {
+            int label = labels->data.i[i];
+            double prob;
+            if( counts->data.i[label+1] == 0 ||
+                (counts->data.i[label+1] <= 1 && label != -1) )
+                continue;
+            prob = *probs_data;
+            if( prob >= maxprob )
+            {
+                maxprob = prob;
+                maxprob_loc = i;
+            }
+        }
+        // maxprob_loc == -1 <=> the number of vectors is less than the number of clusters
+        CV_ASSERT( maxprob_loc >= 0 );
+        counts->data.i[labels->data.i[maxprob_loc] + 1]--;
+        labels->data.i[maxprob_loc] = j;
+        counts->data.i[j + 1]++;
+    }
+
+    __END__;
+
+    cvReleaseMat( &counts );
+} // End of icvFindClusterLabels
+
+/* End of file */
diff --git a/apps/traincascade/old_ml_precomp.hpp b/apps/traincascade/old_ml_precomp.hpp
new file mode 100644 (file)
index 0000000..32ae269
--- /dev/null
@@ -0,0 +1,376 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#ifndef __OPENCV_PRECOMP_H__
+#define __OPENCV_PRECOMP_H__
+
+#include "opencv2/core.hpp"
+#include "old_ml.hpp"
+#include "opencv2/core/core_c.h"
+#include "opencv2/core/utility.hpp"
+
+#include "opencv2/core/private.hpp"
+
+#include <assert.h>
+#include <float.h>
+#include <limits.h>
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+
+#define ML_IMPL CV_IMPL
+#define __BEGIN__ __CV_BEGIN__
+#define __END__ __CV_END__
+#define EXIT __CV_EXIT__
+
+#define CV_MAT_ELEM_FLAG( mat, type, comp, vect, tflag )    \
+    (( tflag == CV_ROW_SAMPLE )                             \
+    ? (CV_MAT_ELEM( mat, type, comp, vect ))                \
+    : (CV_MAT_ELEM( mat, type, vect, comp )))
+
+/* Convert matrix to vector */
+#define ICV_MAT2VEC( mat, vdata, vstep, num )      \
+    if( MIN( (mat).rows, (mat).cols ) != 1 )       \
+        CV_ERROR( CV_StsBadArg, "" );              \
+    (vdata) = ((mat).data.ptr);                    \
+    if( (mat).rows == 1 )                          \
+    {                                              \
+        (vstep) = CV_ELEM_SIZE( (mat).type );      \
+        (num) = (mat).cols;                        \
+    }                                              \
+    else                                           \
+    {                                              \
+        (vstep) = (mat).step;                      \
+        (num) = (mat).rows;                        \
+    }
+
+/* get raw data */
+#define ICV_RAWDATA( mat, flags, rdata, sstep, cstep, m, n )         \
+    (rdata) = (mat).data.ptr;                                        \
+    if( CV_IS_ROW_SAMPLE( flags ) )                                  \
+    {                                                                \
+        (sstep) = (mat).step;                                        \
+        (cstep) = CV_ELEM_SIZE( (mat).type );                        \
+        (m) = (mat).rows;                                            \
+        (n) = (mat).cols;                                            \
+    }                                                                \
+    else                                                             \
+    {                                                                \
+        (cstep) = (mat).step;                                        \
+        (sstep) = CV_ELEM_SIZE( (mat).type );                        \
+        (n) = (mat).rows;                                            \
+        (m) = (mat).cols;                                            \
+    }
+
+#define ICV_IS_MAT_OF_TYPE( mat, mat_type) \
+    (CV_IS_MAT( mat ) && CV_MAT_TYPE( mat->type ) == (mat_type) &&   \
+    (mat)->cols > 0 && (mat)->rows > 0)
+
+/*
+    uchar* data; int sstep, cstep;      - trainData->data
+    uchar* classes; int clstep; int ncl;- trainClasses
+    uchar* tmask; int tmstep; int ntm;  - typeMask
+    uchar* missed; int msstep, mcstep;  - missedMeasurements...
+    int mm, mn;                         == m,n == size,dim
+    uchar* sidx;int sistep;             - sampleIdx
+    uchar* cidx;int cistep;             - compIdx
+    int k, l;                           == n,m == dim,size (length of cidx, sidx)
+    int m, n;                           == size,dim
+*/
+#define ICV_DECLARE_TRAIN_ARGS()                                                    \
+    uchar* data;                                                                    \
+    int sstep, cstep;                                                               \
+    uchar* classes;                                                                 \
+    int clstep;                                                                     \
+    int ncl;                                                                        \
+    uchar* tmask;                                                                   \
+    int tmstep;                                                                     \
+    int ntm;                                                                        \
+    uchar* missed;                                                                  \
+    int msstep, mcstep;                                                             \
+    int mm, mn;                                                                     \
+    uchar* sidx;                                                                    \
+    int sistep;                                                                     \
+    uchar* cidx;                                                                    \
+    int cistep;                                                                     \
+    int k, l;                                                                       \
+    int m, n;                                                                       \
+                                                                                    \
+    data = classes = tmask = missed = sidx = cidx = NULL;                           \
+    sstep = cstep = clstep = ncl = tmstep = ntm = msstep = mcstep = mm = mn = 0;    \
+    sistep = cistep = k = l = m = n = 0;
+
+#define ICV_TRAIN_DATA_REQUIRED( param, flags )                                     \
+    if( !ICV_IS_MAT_OF_TYPE( (param), CV_32FC1 ) )                                  \
+    {                                                                               \
+        CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );                   \
+    }                                                                               \
+    else                                                                            \
+    {                                                                               \
+        ICV_RAWDATA( *(param), (flags), data, sstep, cstep, m, n );                 \
+        k = n;                                                                      \
+        l = m;                                                                      \
+    }
+
+#define ICV_TRAIN_CLASSES_REQUIRED( param )                                         \
+    if( !ICV_IS_MAT_OF_TYPE( (param), CV_32FC1 ) )                                  \
+    {                                                                               \
+        CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );                   \
+    }                                                                               \
+    else                                                                            \
+    {                                                                               \
+        ICV_MAT2VEC( *(param), classes, clstep, ncl );                              \
+        if( m != ncl )                                                              \
+        {                                                                           \
+            CV_ERROR( CV_StsBadArg, "Unmatched sizes" );                            \
+        }                                                                           \
+    }
+
+#define ICV_ARG_NULL( param )                                                       \
+    if( (param) != NULL )                                                           \
+    {                                                                               \
+        CV_ERROR( CV_StsBadArg, #param " parameter must be NULL" );                 \
+    }
+
+#define ICV_MISSED_MEASUREMENTS_OPTIONAL( param, flags )                            \
+    if( param )                                                                     \
+    {                                                                               \
+        if( !ICV_IS_MAT_OF_TYPE( param, CV_8UC1 ) )                                 \
+        {                                                                           \
+            CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );               \
+        }                                                                           \
+        else                                                                        \
+        {                                                                           \
+            ICV_RAWDATA( *(param), (flags), missed, msstep, mcstep, mm, mn );       \
+            if( mm != m || mn != n )                                                \
+            {                                                                       \
+                CV_ERROR( CV_StsBadArg, "Unmatched sizes" );                        \
+            }                                                                       \
+        }                                                                           \
+    }
+
+#define ICV_COMP_IDX_OPTIONAL( param )                                              \
+    if( param )                                                                     \
+    {                                                                               \
+        if( !ICV_IS_MAT_OF_TYPE( param, CV_32SC1 ) )                                \
+        {                                                                           \
+            CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );               \
+        }                                                                           \
+        else                                                                        \
+        {                                                                           \
+            ICV_MAT2VEC( *(param), cidx, cistep, k );                               \
+            if( k > n )                                                             \
+                CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );           \
+        }                                                                           \
+    }
+
+#define ICV_SAMPLE_IDX_OPTIONAL( param )                                            \
+    if( param )                                                                     \
+    {                                                                               \
+        if( !ICV_IS_MAT_OF_TYPE( param, CV_32SC1 ) )                                \
+        {                                                                           \
+            CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );               \
+        }                                                                           \
+        else                                                                        \
+        {                                                                           \
+            ICV_MAT2VEC( *sampleIdx, sidx, sistep, l );                             \
+            if( l > m )                                                             \
+                CV_ERROR( CV_StsBadArg, "Invalid " #param " parameter" );           \
+        }                                                                           \
+    }
+
+/****************************************************************************************/
+#define ICV_CONVERT_FLOAT_ARRAY_TO_MATRICE( array, matrice )        \
+{                                                                   \
+    CvMat a, b;                                                     \
+    int dims = (matrice)->cols;                                     \
+    int nsamples = (matrice)->rows;                                 \
+    int type = CV_MAT_TYPE((matrice)->type);                        \
+    int i, offset = dims;                                           \
+                                                                    \
+    CV_ASSERT( type == CV_32FC1 || type == CV_64FC1 );              \
+    offset *= ((type == CV_32FC1) ? sizeof(float) : sizeof(double));\
+                                                                    \
+    b = cvMat( 1, dims, CV_32FC1 );                                 \
+    cvGetRow( matrice, &a, 0 );                                     \
+    for( i = 0; i < nsamples; i++, a.data.ptr += offset )           \
+    {                                                               \
+        b.data.fl = (float*)array[i];                               \
+        CV_CALL( cvConvert( &b, &a ) );                             \
+    }                                                               \
+}
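+
+/* Usage sketch (illustrative only): copy an array of `nsamples` float* rows, each of
+   length `dims`, into a freshly allocated nsamples x dims training matrix. `samples`,
+   `nsamples` and `dims` are assumed to be provided by the caller, and the macro must be
+   used inside a function with the usual CV_FUNCNAME/__BEGIN__/__END__ scaffolding:
+
+       CvMat* train = cvCreateMat( nsamples, dims, CV_32FC1 );
+       ICV_CONVERT_FLOAT_ARRAY_TO_MATRICE( samples, train );
+*/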
+
+/****************************************************************************************\
+*                       Auxiliary functions declarations                                 *
+\****************************************************************************************/
+
+/* Generates a set of class centers, <num_of_clusters> in total, as uniform random
+   vectors within the parallelepiped in which <data> is concentrated. Vectors in
+   <data> should have horizontal orientation. If <centers> != NULL, the function doesn't
+   allocate any memory; it stores the generated centers in <centers> and returns <centers>.
+   If <centers> == NULL, the function allocates the matrix itself. Centers
+   are supposed to be oriented horizontally. */
+CvMat* icvGenerateRandomClusterCenters( int seed,
+                                        const CvMat* data,
+                                        int num_of_clusters,
+                                        CvMat* centers CV_DEFAULT(0));
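+
+/* Usage sketch (illustrative only): generate 5 random centers for samples stored as
+   rows of a CV_32FC1 matrix `data`; the seed and the cluster count below are arbitrary
+   values chosen for the example:
+
+       CvMat* centers = icvGenerateRandomClusterCenters( 12345, data, 5 );
+       // ... use centers ...
+       cvReleaseMat( &centers );
+*/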
+
+/* Fills <labels> using <probs> by choosing the maximal probability. Outliers are
+   determined by <outlier_thresh> and get the cluster label (-1). The function also ensures
+   that there are no "empty" clusters by filling them with the maximal-probability vector.
+   If probs_sums != NULL, fills it with the sum of probabilities for each sample (this is
+   useful for normalizing the probability matrix of FCM). */
+void icvFindClusterLabels( const CvMat* probs, float outlier_thresh, float r,
+                           const CvMat* labels );
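+
+/* Usage sketch (illustrative only): assign each sample to its most probable cluster.
+   `probs` is an nsamples x nclusters CV_64FC1 matrix, `labels` a 1 x nsamples CV_32SC1
+   row vector, and `outlier_thresh`/`r` are assumed to be chosen by the caller:
+
+       icvFindClusterLabels( probs, outlier_thresh, r, labels );
+       // labels->data.i[i] now holds the cluster index for sample i (or -1 for an outlier)
+*/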
+
+typedef struct CvSparseVecElem32f
+{
+    int idx;
+    float val;
+}
+CvSparseVecElem32f;
+
+/* Prepare training data and related parameters */
+#define CV_TRAIN_STATMODEL_DEFRAGMENT_TRAIN_DATA    1
+#define CV_TRAIN_STATMODEL_SAMPLES_AS_ROWS          2
+#define CV_TRAIN_STATMODEL_SAMPLES_AS_COLUMNS       4
+#define CV_TRAIN_STATMODEL_CATEGORICAL_RESPONSE     8
+#define CV_TRAIN_STATMODEL_ORDERED_RESPONSE         16
+#define CV_TRAIN_STATMODEL_RESPONSES_ON_OUTPUT      32
+#define CV_TRAIN_STATMODEL_ALWAYS_COPY_TRAIN_DATA   64
+#define CV_TRAIN_STATMODEL_SPARSE_AS_SPARSE         128
+
+int
+cvPrepareTrainData( const char* /*funcname*/,
+                    const CvMat* train_data, int tflag,
+                    const CvMat* responses, int response_type,
+                    const CvMat* var_idx,
+                    const CvMat* sample_idx,
+                    bool always_copy_data,
+                    const float*** out_train_samples,
+                    int* _sample_count,
+                    int* _var_count,
+                    int* _var_all,
+                    CvMat** out_responses,
+                    CvMat** out_response_map,
+                    CvMat** out_var_idx,
+                    CvMat** out_sample_idx=0 );
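+
+/* Usage sketch (an illustrative call under assumed inputs, not a prescription): obtain
+   the training samples as an array of float rows together with preprocessed responses.
+   `train_data`, `responses`, `var_idx` and `sample_idx` are assumed to be prepared by
+   the caller, and the outputs must be released afterwards:
+
+       const float** samples = 0;
+       CvMat *responses_out = 0, *response_map = 0, *var_idx_out = 0;
+       int sample_count = 0, var_count = 0, var_all = 0;
+       cvPrepareTrainData( "MyModel::train", train_data, CV_ROW_SAMPLE,
+                           responses, CV_VAR_CATEGORICAL,
+                           var_idx, sample_idx, false,
+                           &samples, &sample_count, &var_count, &var_all,
+                           &responses_out, &response_map, &var_idx_out );
+*/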
+
+void
+cvSortSamplesByClasses( const float** samples, const CvMat* classes,
+                        int* class_ranges, const uchar** mask CV_DEFAULT(0) );
+
+void
+cvCombineResponseMaps (CvMat*  _responses,
+                 const CvMat*  old_response_map,
+                       CvMat*  new_response_map,
+                       CvMat** out_response_map);
+
+void
+cvPreparePredictData( const CvArr* sample, int dims_all, const CvMat* comp_idx,
+                      int class_count, const CvMat* prob, float** row_sample,
+                      int as_sparse CV_DEFAULT(0) );
+
+/* copies clustering [or batch "predict"] results
+   (labels and/or centers and/or probs) back to the output arrays */
+void
+cvWritebackLabels( const CvMat* labels, CvMat* dst_labels,
+                   const CvMat* centers, CvMat* dst_centers,
+                   const CvMat* probs, CvMat* dst_probs,
+                   const CvMat* sample_idx, int samples_all,
+                   const CvMat* comp_idx, int dims_all );
+#define cvWritebackResponses cvWritebackLabels
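+
+/* Usage sketch (illustrative only): write only the labels back to the caller's output
+   array after clustering, passing 0 for the unused center/probability outputs:
+
+       cvWritebackLabels( labels, dst_labels, 0, 0, 0, 0,
+                          sample_idx, samples_all, comp_idx, dims_all );
+*/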
+
+#define XML_FIELD_NAME "_name"
+CvFileNode* icvFileNodeGetChild(CvFileNode* father, const char* name);
+CvFileNode* icvFileNodeGetChildArrayElem(CvFileNode* father, const char* name,int index);
+CvFileNode* icvFileNodeGetNext(CvFileNode* n, const char* name);
+
+
+void cvCheckTrainData( const CvMat* train_data, int tflag,
+                       const CvMat* missing_mask,
+                       int* var_all, int* sample_all );
+
+CvMat* cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, bool check_for_duplicates=false );
+
+CvMat* cvPreprocessVarType( const CvMat* type_mask, const CvMat* var_idx,
+                            int var_all, int* response_type );
+
+CvMat* cvPreprocessOrderedResponses( const CvMat* responses,
+                const CvMat* sample_idx, int sample_all );
+
+CvMat* cvPreprocessCategoricalResponses( const CvMat* responses,
+                const CvMat* sample_idx, int sample_all,
+                CvMat** out_response_map, CvMat** class_counts=0 );
+
+const float** cvGetTrainSamples( const CvMat* train_data, int tflag,
+                   const CvMat* var_idx, const CvMat* sample_idx,
+                   int* _var_count, int* _sample_count,
+                   bool always_copy_data=false );
+
+namespace cv
+{
+    struct DTreeBestSplitFinder
+    {
+        DTreeBestSplitFinder(){ splitSize = 0; tree = 0; node = 0; }
+        DTreeBestSplitFinder( CvDTree* _tree, CvDTreeNode* _node);
+        DTreeBestSplitFinder( const DTreeBestSplitFinder& finder, Split );
+        virtual ~DTreeBestSplitFinder() {}
+        virtual void operator()(const BlockedRange& range);
+        void join( DTreeBestSplitFinder& rhs );
+        Ptr<CvDTreeSplit> bestSplit;
+        Ptr<CvDTreeSplit> split;
+        int splitSize;
+        CvDTree* tree;
+        CvDTreeNode* node;
+    };
+
+    struct ForestTreeBestSplitFinder : DTreeBestSplitFinder
+    {
+        ForestTreeBestSplitFinder() : DTreeBestSplitFinder() {}
+        ForestTreeBestSplitFinder( CvForestTree* _tree, CvDTreeNode* _node );
+        ForestTreeBestSplitFinder( const ForestTreeBestSplitFinder& finder, Split );
+        virtual void operator()(const BlockedRange& range);
+    };
+}
+
+#endif /* __OPENCV_PRECOMP_H__ */
diff --git a/apps/traincascade/old_ml_tree.cpp b/apps/traincascade/old_ml_tree.cpp
new file mode 100644 (file)
index 0000000..b7e346c
--- /dev/null
@@ -0,0 +1,4151 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+//  By downloading, copying, installing or using the software you agree to this license.
+//  If you do not agree to this license, do not download, install,
+//  copy or use the software.
+//
+//
+//                        Intel License Agreement
+//
+// Copyright (C) 2000, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+//   * Redistribution's of source code must retain the above copyright notice,
+//     this list of conditions and the following disclaimer.
+//
+//   * Redistribution's in binary form must reproduce the above copyright notice,
+//     this list of conditions and the following disclaimer in the documentation
+//     and/or other materials provided with the distribution.
+//
+//   * The name of Intel Corporation may not be used to endorse or promote products
+//     derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+#include "old_ml_precomp.hpp"
+#include <ctype.h>
+
+using namespace cv;
+
+static const float ord_nan = FLT_MAX*0.5f;
+static const int min_block_size = 1 << 16;
+static const int block_size_delta = 1 << 10;
+
+CvDTreeTrainData::CvDTreeTrainData()
+{
+    var_idx = var_type = cat_count = cat_ofs = cat_map =
+        priors = priors_mult = counts = direction = split_buf = responses_copy = 0;
+    buf = 0;
+    tree_storage = temp_storage = 0;
+
+    clear();
+}
+
+
+CvDTreeTrainData::CvDTreeTrainData( const CvMat* _train_data, int _tflag,
+                      const CvMat* _responses, const CvMat* _var_idx,
+                      const CvMat* _sample_idx, const CvMat* _var_type,
+                      const CvMat* _missing_mask, const CvDTreeParams& _params,
+                      bool _shared, bool _add_labels )
+{
+    var_idx = var_type = cat_count = cat_ofs = cat_map =
+        priors = priors_mult = counts = direction = split_buf = responses_copy = 0;
+    buf = 0;
+
+    tree_storage = temp_storage = 0;
+
+    set_data( _train_data, _tflag, _responses, _var_idx, _sample_idx,
+              _var_type, _missing_mask, _params, _shared, _add_labels );
+}
+
+
+CvDTreeTrainData::~CvDTreeTrainData()
+{
+    clear();
+}
+
+
+bool CvDTreeTrainData::set_params( const CvDTreeParams& _params )
+{
+    bool ok = false;
+
+    CV_FUNCNAME( "CvDTreeTrainData::set_params" );
+
+    __BEGIN__;
+
+    // set parameters
+    params = _params;
+
+    if( params.max_categories < 2 )
+        CV_ERROR( CV_StsOutOfRange, "params.max_categories should be >= 2" );
+    params.max_categories = MIN( params.max_categories, 15 );
+
+    if( params.max_depth < 0 )
+        CV_ERROR( CV_StsOutOfRange, "params.max_depth should be >= 0" );
+    params.max_depth = MIN( params.max_depth, 25 );
+
+    params.min_sample_count = MAX(params.min_sample_count,1);
+
+    if( params.cv_folds < 0 )
+        CV_ERROR( CV_StsOutOfRange,
+        "params.cv_folds should be =0 (the tree is not pruned) "
+        "or n>0 (tree is pruned using n-fold cross-validation)" );
+
+    if( params.cv_folds == 1 )
+        params.cv_folds = 0;
+
+    if( params.regression_accuracy < 0 )
+        CV_ERROR( CV_StsOutOfRange, "params.regression_accuracy should be >= 0" );
+
+    ok = true;
+
+    __END__;
+
+    return ok;
+}
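+
+// For reference (an illustrative sketch, assuming the classic CvDTreeParams constructor
+// argument order is kept in old_ml.hpp): a parameter set that passes the checks above
+// might look like
+//
+//     CvDTreeParams params( 10,     // max_depth (clamped above to <= 25)
+//                           5,      // min_sample_count (forced to >= 1)
+//                           0.01f,  // regression_accuracy (must be >= 0)
+//                           true,   // use_surrogates
+//                           15,     // max_categories (clamped above to [2, 15])
+//                           3,      // cv_folds (0 or 1 means no pruning)
+//                           true,   // use_1se_rule
+//                           false,  // truncate_pruned_tree
+//                           0 );    // priors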
+
+template<typename T>
+class LessThanPtr
+{
+public:
+    bool operator()(T* a, T* b) const { return *a < *b; }
+};
+
+template<typename T, typename Idx>
+class LessThanIdx
+{
+public:
+    LessThanIdx( const T* _arr ) : arr(_arr) {}
+    bool operator()(Idx a, Idx b) const { return arr[a] < arr[b]; }
+    const T* arr;
+};
+
+class LessThanPairs
+{
+public:
+    bool operator()(const CvPair16u32s& a, const CvPair16u32s& b) const { return *a.i < *b.i; }
+};
+
+void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
+    const CvMat* _responses, const CvMat* _var_idx, const CvMat* _sample_idx,
+    const CvMat* _var_type, const CvMat* _missing_mask, const CvDTreeParams& _params,
+    bool _shared, bool _add_labels, bool _update_data )
+{
+    CvMat* sample_indices = 0;
+    CvMat* var_type0 = 0;
+    CvMat* tmp_map = 0;
+    int** int_ptr = 0;
+    CvPair16u32s* pair16u32s_ptr = 0;
+    CvDTreeTrainData* data = 0;
+    float *_fdst = 0;
+    int *_idst = 0;
+    unsigned short* udst = 0;
+    int* idst = 0;
+
+    CV_FUNCNAME( "CvDTreeTrainData::set_data" );
+
+    __BEGIN__;
+
+    int sample_all = 0, r_type, cv_n;
+    int total_c_count = 0;
+    int tree_block_size, temp_block_size, max_split_size, nv_size, cv_size = 0;
+    int ds_step, dv_step, ms_step = 0, mv_step = 0; // {data|mask}{sample|var}_step
+    int vi, i, size;
+    char err[100];
+    const int *sidx = 0, *vidx = 0;
+
+    uint64 effective_buf_size = 0;
+    int effective_buf_height = 0, effective_buf_width = 0;
+
+    if( _update_data && data_root )
+    {
+        data = new CvDTreeTrainData( _train_data, _tflag, _responses, _var_idx,
+            _sample_idx, _var_type, _missing_mask, _params, _shared, _add_labels );
+
+        // compare new and old train data
+        if( !(data->var_count == var_count &&
+            cvNorm( data->var_type, var_type, CV_C ) < FLT_EPSILON &&
+            cvNorm( data->cat_count, cat_count, CV_C ) < FLT_EPSILON &&
+            cvNorm( data->cat_map, cat_map, CV_C ) < FLT_EPSILON) )
+            CV_ERROR( CV_StsBadArg,
+            "The new training data must have the same types and the input and output variables "
+            "and the same categories for categorical variables" );
+
+        cvReleaseMat( &priors );
+        cvReleaseMat( &priors_mult );
+        cvReleaseMat( &buf );
+        cvReleaseMat( &direction );
+        cvReleaseMat( &split_buf );
+        cvReleaseMemStorage( &temp_storage );
+
+        priors = data->priors; data->priors = 0;
+        priors_mult = data->priors_mult; data->priors_mult = 0;
+        buf = data->buf; data->buf = 0;
+        buf_count = data->buf_count; buf_size = data->buf_size;
+        sample_count = data->sample_count;
+
+        direction = data->direction; data->direction = 0;
+        split_buf = data->split_buf; data->split_buf = 0;
+        temp_storage = data->temp_storage; data->temp_storage = 0;
+        nv_heap = data->nv_heap; cv_heap = data->cv_heap;
+
+        data_root = new_node( 0, sample_count, 0, 0 );
+        EXIT;
+    }
+
+    clear();
+
+    var_all = 0;
+    rng = &cv::theRNG();
+
+    CV_CALL( set_params( _params ));
+
+    // check parameter types and sizes
+    CV_CALL( cvCheckTrainData( _train_data, _tflag, _missing_mask, &var_all, &sample_all ));
+
+    train_data = _train_data;
+    responses = _responses;
+
+    if( _tflag == CV_ROW_SAMPLE )
+    {
+        ds_step = _train_data->step/CV_ELEM_SIZE(_train_data->type);
+        dv_step = 1;
+        if( _missing_mask )
+            ms_step = _missing_mask->step, mv_step = 1;
+    }
+    else
+    {
+        dv_step = _train_data->step/CV_ELEM_SIZE(_train_data->type);
+        ds_step = 1;
+        if( _missing_mask )
+            mv_step = _missing_mask->step, ms_step = 1;
+    }
+    tflag = _tflag;
+
+    sample_count = sample_all;
+    var_count = var_all;
+
+    if( _sample_idx )
+    {
+        CV_CALL( sample_indices = cvPreprocessIndexArray( _sample_idx, sample_all ));
+        sidx = sample_indices->data.i;
+        sample_count = sample_indices->rows + sample_indices->cols - 1;
+    }
+
+    if( _var_idx )
+    {
+        CV_CALL( var_idx = cvPreprocessIndexArray( _var_idx, var_all ));
+        vidx = var_idx->data.i;
+        var_count = var_idx->rows + var_idx->cols - 1;
+    }
+
+    is_buf_16u = false;
+    if ( sample_count < 65536 )
+        is_buf_16u = true;
+
+    if( !CV_IS_MAT(_responses) ||
+        (CV_MAT_TYPE(_responses->type) != CV_32SC1 &&
+         CV_MAT_TYPE(_responses->type) != CV_32FC1) ||
+        (_responses->rows != 1 && _responses->cols != 1) ||
+        _responses->rows + _responses->cols - 1 != sample_all )
+        CV_ERROR( CV_StsBadArg, "The array of _responses must be an integer or "
+                  "floating-point vector containing as many elements as "
+                  "the total number of samples in the training data matrix" );
+
+    r_type = CV_VAR_CATEGORICAL;
+    if( _var_type )
+        CV_CALL( var_type0 = cvPreprocessVarType( _var_type, var_idx, var_count, &r_type ));
+
+    CV_CALL( var_type = cvCreateMat( 1, var_count+2, CV_32SC1 ));
+
+    cat_var_count = 0;
+    ord_var_count = -1;
+
+    is_classifier = r_type == CV_VAR_CATEGORICAL;
+
+    // step 0. calc the number of categorical vars
+    for( vi = 0; vi < var_count; vi++ )
+    {
+        char vt = var_type0 ? var_type0->data.ptr[vi] : CV_VAR_ORDERED;
+        var_type->data.i[vi] = vt == CV_VAR_CATEGORICAL ? cat_var_count++ : ord_var_count--;
+    }
+
+    ord_var_count = ~ord_var_count;
+    cv_n = params.cv_folds;
+    // set the two last elements of var_type array to be able
+    // to locate responses and cross-validation labels using
+    // the corresponding get_* functions.
+    var_type->data.i[var_count] = cat_var_count;
+    var_type->data.i[var_count+1] = cat_var_count+1;
+
+    // in case of single ordered predictor we need dummy cv_labels
+    // for safe split_node_data() operation
+    have_labels = cv_n > 0 || (ord_var_count == 1 && cat_var_count == 0) || _add_labels;
+
+    work_var_count = var_count + (is_classifier ? 1 : 0) // for responses class_labels
+                               + (have_labels ? 1 : 0); // for cv_labels
+
+    shared = _shared;
+    buf_count = shared ? 2 : 1;
+
+    buf_size = -1; // the member buf_size is obsolete
+
+    effective_buf_size = (uint64)(work_var_count + 1)*(uint64)sample_count * buf_count; // this is the total size of "CvMat buf" to be allocated
+    effective_buf_width = sample_count;
+    effective_buf_height = work_var_count+1;
+
+    if (effective_buf_width >= effective_buf_height)
+        effective_buf_height *= buf_count;
+    else
+        effective_buf_width *= buf_count;
+
+    if ((uint64)effective_buf_width * (uint64)effective_buf_height != effective_buf_size)
+    {
+        CV_Error(CV_StsBadArg, "The memory buffer cannot be allocated since its size exceeds integer fields limit");
+    }
+
+
+
+    if ( is_buf_16u )
+    {
+        CV_CALL( buf = cvCreateMat( effective_buf_height, effective_buf_width, CV_16UC1 ));
+        CV_CALL( pair16u32s_ptr = (CvPair16u32s*)cvAlloc( sample_count*sizeof(pair16u32s_ptr[0]) ));
+    }
+    else
+    {
+        CV_CALL( buf = cvCreateMat( effective_buf_height, effective_buf_width, CV_32SC1 ));
+        CV_CALL( int_ptr = (int**)cvAlloc( sample_count*sizeof(int_ptr[0]) ));
+    }
+
+    size = is_classifier ? (cat_var_count+1) : cat_var_count;
+    size = !size ? 1 : size;
+    CV_CALL( cat_count = cvCreateMat( 1, size, CV_32SC1 ));
+    CV_CALL( cat_ofs = cvCreateMat( 1, size, CV_32SC1 ));
+
+    size = is_classifier ? (cat_var_count + 1)*params.max_categories : cat_var_count*params.max_categories;
+    size = !size ? 1 : size;
+    CV_CALL( cat_map = cvCreateMat( 1, size, CV_32SC1 ));
+
+    // now calculate the maximum size of split,
+    // create memory storage that will keep nodes and splits of the decision tree
+    // allocate root node and the buffer for the whole training data
+    max_split_size = cvAlign(sizeof(CvDTreeSplit) +
+        (MAX(0,sample_count - 33)/32)*sizeof(int),sizeof(void*));
+    tree_block_size = MAX((int)sizeof(CvDTreeNode)*8, max_split_size);
+    tree_block_size = MAX(tree_block_size + block_size_delta, min_block_size);
+    CV_CALL( tree_storage = cvCreateMemStorage( tree_block_size ));
+    CV_CALL( node_heap = cvCreateSet( 0, sizeof(*node_heap), sizeof(CvDTreeNode), tree_storage ));
+
+    nv_size = var_count*sizeof(int);
+    nv_size = cvAlign(MAX( nv_size, (int)sizeof(CvSetElem) ), sizeof(void*));
+
+    temp_block_size = nv_size;
+
+    if( cv_n )
+    {
+        if( sample_count < cv_n*MAX(params.min_sample_count,10) )
+            CV_ERROR( CV_StsOutOfRange,
+                "The many folds in cross-validation for such a small dataset" );
+
+        cv_size = cvAlign( cv_n*(sizeof(int) + sizeof(double)*2), sizeof(double) );
+        temp_block_size = MAX(temp_block_size, cv_size);
+    }
+
+    temp_block_size = MAX( temp_block_size + block_size_delta, min_block_size );
+    CV_CALL( temp_storage = cvCreateMemStorage( temp_block_size ));
+    CV_CALL( nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nv_size, temp_storage ));
+    if( cv_size )
+        CV_CALL( cv_heap = cvCreateSet( 0, sizeof(*cv_heap), cv_size, temp_storage ));
+
+    CV_CALL( data_root = new_node( 0, sample_count, 0, 0 ));
+
+    max_c_count = 1;
+
+    _fdst = 0;
+    _idst = 0;
+    if (ord_var_count)
+        _fdst = (float*)cvAlloc(sample_count*sizeof(_fdst[0]));
+    if (is_buf_16u && (cat_var_count || is_classifier))
+        _idst = (int*)cvAlloc(sample_count*sizeof(_idst[0]));
+
+    // transform the training data to convenient representation
+    for( vi = 0; vi <= var_count; vi++ )
+    {
+        int ci;
+        const uchar* mask = 0;
+        int64 m_step = 0, step;
+        const int* idata = 0;
+        const float* fdata = 0;
+        int num_valid = 0;
+
+        if( vi < var_count ) // analyze i-th input variable
+        {
+            int vi0 = vidx ? vidx[vi] : vi;
+            ci = get_var_type(vi);
+            step = ds_step; m_step = ms_step;
+            if( CV_MAT_TYPE(_train_data->type) == CV_32SC1 )
+                idata = _train_data->data.i + vi0*dv_step;
+            else
+                fdata = _train_data->data.fl + vi0*dv_step;
+            if( _missing_mask )
+                mask = _missing_mask->data.ptr + vi0*mv_step;
+        }
+        else // analyze _responses
+        {
+            ci = cat_var_count;
+            step = CV_IS_MAT_CONT(_responses->type) ?
+                1 : _responses->step / CV_ELEM_SIZE(_responses->type);
+            if( CV_MAT_TYPE(_responses->type) == CV_32SC1 )
+                idata = _responses->data.i;
+            else
+                fdata = _responses->data.fl;
+        }
+
+        if( (vi < var_count && ci>=0) ||
+            (vi == var_count && is_classifier) ) // process categorical variable or response
+        {
+            int c_count, prev_label;
+            int* c_map;
+
+            if (is_buf_16u)
+                udst = (unsigned short*)(buf->data.s + vi*sample_count);
+            else
+                idst = buf->data.i + vi*sample_count;
+
+            // copy data
+            for( i = 0; i < sample_count; i++ )
+            {
+                int val = INT_MAX, si = sidx ? sidx[i] : i;
+                if( !mask || !mask[(size_t)si*m_step] )
+                {
+                    if( idata )
+                        val = idata[(size_t)si*step];
+                    else
+                    {
+                        float t = fdata[(size_t)si*step];
+                        val = cvRound(t);
+                        if( fabs(t - val) > FLT_EPSILON )
+                        {
+                            sprintf( err, "%d-th value of %d-th (categorical) "
+                                "variable is not an integer", i, vi );
+                            CV_ERROR( CV_StsBadArg, err );
+                        }
+                    }
+
+                    if( val == INT_MAX )
+                    {
+                        sprintf( err, "%d-th value of %d-th (categorical) "
+                            "variable is too large", i, vi );
+                        CV_ERROR( CV_StsBadArg, err );
+                    }
+                    num_valid++;
+                }
+                if (is_buf_16u)
+                {
+                    _idst[i] = val;
+                    pair16u32s_ptr[i].u = udst + i;
+                    pair16u32s_ptr[i].i = _idst + i;
+                }
+                else
+                {
+                    idst[i] = val;
+                    int_ptr[i] = idst + i;
+                }
+            }
+
+            c_count = num_valid > 0;
+            if (is_buf_16u)
+            {
+                std::sort(pair16u32s_ptr, pair16u32s_ptr + sample_count, LessThanPairs());
+                // count the categories
+                for( i = 1; i < num_valid; i++ )
+                    if (*pair16u32s_ptr[i].i != *pair16u32s_ptr[i-1].i)
+                        c_count ++ ;
+            }
+            else
+            {
+                std::sort(int_ptr, int_ptr + sample_count, LessThanPtr<int>());
+                // count the categories
+                for( i = 1; i < num_valid; i++ )
+                    c_count += *int_ptr[i] != *int_ptr[i-1];
+            }
+
+            if( vi > 0 )
+                max_c_count = MAX( max_c_count, c_count );
+            cat_count->data.i[ci] = c_count;
+            cat_ofs->data.i[ci] = total_c_count;
+
+            // resize cat_map, if needed
+            if( cat_map->cols < total_c_count + c_count )
+            {
+                tmp_map = cat_map;
+                CV_CALL( cat_map = cvCreateMat( 1,
+                    MAX(cat_map->cols*3/2,total_c_count+c_count), CV_32SC1 ));
+                for( i = 0; i < total_c_count; i++ )
+                    cat_map->data.i[i] = tmp_map->data.i[i];
+                cvReleaseMat( &tmp_map );
+            }
+
+            c_map = cat_map->data.i + total_c_count;
+            total_c_count += c_count;
+
+            c_count = -1;
+            if (is_buf_16u)
+            {
+                // compact the class indices and build the map
+                prev_label = ~*pair16u32s_ptr[0].i;
+                for( i = 0; i < num_valid; i++ )
+                {
+                    int cur_label = *pair16u32s_ptr[i].i;
+                    if( cur_label != prev_label )
+                        c_map[++c_count] = prev_label = cur_label;
+                    *pair16u32s_ptr[i].u = (unsigned short)c_count;
+                }
+                // replace labels for missing values with 65535 (-1 in the 16-bit buffer)
+                for( ; i < sample_count; i++ )
+                    *pair16u32s_ptr[i].u = 65535;
+            }
+            else
+            {
+                // compact the class indices and build the map
+                prev_label = ~*int_ptr[0];
+                for( i = 0; i < num_valid; i++ )
+                {
+                    int cur_label = *int_ptr[i];
+                    if( cur_label != prev_label )
+                        c_map[++c_count] = prev_label = cur_label;
+                    *int_ptr[i] = c_count;
+                }
+                // replace labels for missing values with -1
+                for( ; i < sample_count; i++ )
+                    *int_ptr[i] = -1;
+            }
+        }
+        else if( ci < 0 ) // process ordered variable
+        {
+            if (is_buf_16u)
+                udst = (unsigned short*)(buf->data.s + vi*sample_count);
+            else
+                idst = buf->data.i + vi*sample_count;
+
+            for( i = 0; i < sample_count; i++ )
+            {
+                float val = ord_nan;
+                int si = sidx ? sidx[i] : i;
+                if( !mask || !mask[(size_t)si*m_step] )
+                {
+                    if( idata )
+                        val = (float)idata[(size_t)si*step];
+                    else
+                        val = fdata[(size_t)si*step];
+
+                    if( fabs(val) >= ord_nan )
+                    {
+                        sprintf( err, "%d-th value of %d-th (ordered) "
+                            "variable (=%g) is too large", i, vi, val );
+                        CV_ERROR( CV_StsBadArg, err );
+                    }
+                    num_valid++;
+                }
+
+                if (is_buf_16u)
+                    udst[i] = (unsigned short)i; // TODO: memory corruption may be here
+                else
+                    idst[i] = i;
+                _fdst[i] = val;
+
+            }
+            if (is_buf_16u)
+                std::sort(udst, udst + sample_count, LessThanIdx<float, unsigned short>(_fdst));
+            else
+                std::sort(idst, idst + sample_count, LessThanIdx<float, int>(_fdst));
+        }
+
+        if( vi < var_count )
+            data_root->set_num_valid(vi, num_valid);
+    }
+
+    // set sample labels
+    if (is_buf_16u)
+        udst = (unsigned short*)(buf->data.s + work_var_count*sample_count);
+    else
+        idst = buf->data.i + work_var_count*sample_count;
+
+    for (i = 0; i < sample_count; i++)
+    {
+        if (udst)
+            udst[i] = sidx ? (unsigned short)sidx[i] : (unsigned short)i;
+        else
+            idst[i] = sidx ? sidx[i] : i;
+    }
+
+    if( cv_n )
+    {
+        unsigned short* usdst = 0;
+        int* idst2 = 0;
+
+        if (is_buf_16u)
+        {
+            usdst = (unsigned short*)(buf->data.s + (get_work_var_count()-1)*sample_count);
+            for( i = vi = 0; i < sample_count; i++ )
+            {
+                usdst[i] = (unsigned short)vi++;
+                vi &= vi < cv_n ? -1 : 0;
+            }
+
+            for( i = 0; i < sample_count; i++ )
+            {
+                int a = (*rng)(sample_count);
+                int b = (*rng)(sample_count);
+                unsigned short unsh = (unsigned short)vi;
+                CV_SWAP( usdst[a], usdst[b], unsh );
+            }
+        }
+        else
+        {
+            idst2 = buf->data.i + (get_work_var_count()-1)*sample_count;
+            for( i = vi = 0; i < sample_count; i++ )
+            {
+                idst2[i] = vi++;
+                vi &= vi < cv_n ? -1 : 0;
+            }
+
+            for( i = 0; i < sample_count; i++ )
+            {
+                int a = (*rng)(sample_count);
+                int b = (*rng)(sample_count);
+                CV_SWAP( idst2[a], idst2[b], vi );
+            }
+        }
+    }
+
+    if ( cat_map )
+        cat_map->cols = MAX( total_c_count, 1 );
+
+    max_split_size = cvAlign(sizeof(CvDTreeSplit) +
+        (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
+    CV_CALL( split_heap = cvCreateSet( 0, sizeof(*split_heap), max_split_size, tree_storage ));
+
+    have_priors = is_classifier && params.priors;
+    if( is_classifier )
+    {
+        int m = get_num_classes();
+        double sum = 0;
+        CV_CALL( priors = cvCreateMat( 1, m, CV_64F ));
+        for( i = 0; i < m; i++ )
+        {
+            double val = have_priors ? params.priors[i] : 1.;
+            if( val <= 0 )
+                CV_ERROR( CV_StsOutOfRange, "Every class weight should be positive" );
+            priors->data.db[i] = val;
+            sum += val;
+        }
+
+        // normalize weights
+        if( have_priors )
+            cvScale( priors, priors, 1./sum );
+
+        CV_CALL( priors_mult = cvCloneMat( priors ));
+        CV_CALL( counts = cvCreateMat( 1, m, CV_32SC1 ));
+    }
+
+
+    CV_CALL( direction = cvCreateMat( 1, sample_count, CV_8UC1 ));
+    CV_CALL( split_buf = cvCreateMat( 1, sample_count, CV_32SC1 ));
+
+    __END__;
+
+    if( data )
+        delete data;
+
+    if (_fdst)
+        cvFree( &_fdst );
+    if (_idst)
+        cvFree( &_idst );
+    cvFree( &int_ptr );
+    cvFree( &pair16u32s_ptr);
+    cvReleaseMat( &var_type0 );
+    cvReleaseMat( &sample_indices );
+    cvReleaseMat( &tmp_map );
+}
+
+void CvDTreeTrainData::do_responses_copy()
+{
+    responses_copy = cvCreateMat( responses->rows, responses->cols, responses->type );
+    cvCopy( responses, responses_copy);
+    responses = responses_copy;
+}
+
+CvDTreeNode* CvDTreeTrainData::subsample_data( const CvMat* _subsample_idx )
+{
+    CvDTreeNode* root = 0;
+    CvMat* isubsample_idx = 0;
+    CvMat* subsample_co = 0;
+
+    bool isMakeRootCopy = true;
+
+    CV_FUNCNAME( "CvDTreeTrainData::subsample_data" );
+
+    __BEGIN__;
+
+    if( !data_root )
+        CV_ERROR( CV_StsError, "No training data has been set" );
+
+    if( _subsample_idx )
+    {
+        CV_CALL( isubsample_idx = cvPreprocessIndexArray( _subsample_idx, sample_count ));
+
+        if( isubsample_idx->cols + isubsample_idx->rows - 1 == sample_count )
+        {
+            const int* sidx = isubsample_idx->data.i;
+            for( int i = 0; i < sample_count; i++ )
+            {
+                if( sidx[i] != i )
+                {
+                    isMakeRootCopy = false;
+                    break;
+                }
+            }
+        }
+        else
+            isMakeRootCopy = false;
+    }
+
+    if( isMakeRootCopy )
+    {
+        // make a copy of the root node
+        CvDTreeNode temp;
+        int i;
+        root = new_node( 0, 1, 0, 0 );
+        temp = *root;
+        *root = *data_root;
+        root->num_valid = temp.num_valid;
+        if( root->num_valid )
+        {
+            for( i = 0; i < var_count; i++ )
+                root->num_valid[i] = data_root->num_valid[i];
+        }
+        root->cv_Tn = temp.cv_Tn;
+        root->cv_node_risk = temp.cv_node_risk;
+        root->cv_node_error = temp.cv_node_error;
+    }
+    else
+    {
+        int* sidx = isubsample_idx->data.i;
+        // co - array of count/offset pairs (to handle duplicated values in _subsample_idx)
+        int* co, cur_ofs = 0;
+        int vi, i;
+        int workVarCount = get_work_var_count();
+        int count = isubsample_idx->rows + isubsample_idx->cols - 1;
+
+        root = new_node( 0, count, 1, 0 );
+
+        CV_CALL( subsample_co = cvCreateMat( 1, sample_count*2, CV_32SC1 ));
+        cvZero( subsample_co );
+        co = subsample_co->data.i;
+        for( i = 0; i < count; i++ )
+            co[sidx[i]*2]++;
+        for( i = 0; i < sample_count; i++ )
+        {
+            if( co[i*2] )
+            {
+                co[i*2+1] = cur_ofs;
+                cur_ofs += co[i*2];
+            }
+            else
+                co[i*2+1] = -1;
+        }
+
+        cv::AutoBuffer<uchar> inn_buf(sample_count*(2*sizeof(int) + sizeof(float)));
+        for( vi = 0; vi < workVarCount; vi++ )
+        {
+            int ci = get_var_type(vi);
+
+            if( ci >= 0 || vi >= var_count )
+            {
+                int num_valid = 0;
+                const int* src = CvDTreeTrainData::get_cat_var_data( data_root, vi, (int*)(uchar*)inn_buf );
+
+                if (is_buf_16u)
+                {
+                    unsigned short* udst = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
+                        vi*sample_count + root->offset);
+                    for( i = 0; i < count; i++ )
+                    {
+                        int val = src[sidx[i]];
+                        udst[i] = (unsigned short)val;
+                        num_valid += val >= 0;
+                    }
+                }
+                else
+                {
+                    int* idst = buf->data.i + root->buf_idx*get_length_subbuf() +
+                        vi*sample_count + root->offset;
+                    for( i = 0; i < count; i++ )
+                    {
+                        int val = src[sidx[i]];
+                        idst[i] = val;
+                        num_valid += val >= 0;
+                    }
+                }
+
+                if( vi < var_count )
+                    root->set_num_valid(vi, num_valid);
+            }
+            else
+            {
+                int *src_idx_buf = (int*)(uchar*)inn_buf;
+                float *src_val_buf = (float*)(src_idx_buf + sample_count);
+                int* sample_indices_buf = (int*)(src_val_buf + sample_count);
+                const int* src_idx = 0;
+                const float* src_val = 0;
+                get_ord_var_data( data_root, vi, src_val_buf, src_idx_buf, &src_val, &src_idx, sample_indices_buf );
+                int j = 0, idx, count_i;
+                int num_valid = data_root->get_num_valid(vi);
+
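+                // src_idx lists the parent's samples in increasing order of this variable;
+                // walking it and emitting co[idx*2] copies of each selected sample at its
+                // destination offsets keeps the child's index buffer sorted as well
+                // (valid values first, then the samples with missing values)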
+                if (is_buf_16u)
+                {
+                    unsigned short* udst_idx = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
+                        vi*sample_count + data_root->offset);
+                    for( i = 0; i < num_valid; i++ )
+                    {
+                        idx = src_idx[i];
+                        count_i = co[idx*2];
+                        if( count_i )
+                            for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
+                                udst_idx[j] = (unsigned short)cur_ofs;
+                    }
+
+                    root->set_num_valid(vi, j);
+
+                    for( ; i < sample_count; i++ )
+                    {
+                        idx = src_idx[i];
+                        count_i = co[idx*2];
+                        if( count_i )
+                            for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
+                                udst_idx[j] = (unsigned short)cur_ofs;
+                    }
+                }
+                else
+                {
+                    int* idst_idx = buf->data.i + root->buf_idx*get_length_subbuf() +
+                        vi*sample_count + root->offset;
+                    for( i = 0; i < num_valid; i++ )
+                    {
+                        idx = src_idx[i];
+                        count_i = co[idx*2];
+                        if( count_i )
+                            for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
+                                idst_idx[j] = cur_ofs;
+                    }
+
+                    root->set_num_valid(vi, j);
+
+                    for( ; i < sample_count; i++ )
+                    {
+                        idx = src_idx[i];
+                        count_i = co[idx*2];
+                        if( count_i )
+                            for( cur_ofs = co[idx*2+1]; count_i > 0; count_i--, j++, cur_ofs++ )
+                                idst_idx[j] = cur_ofs;
+                    }
+                }
+            }
+        }
+        // sample indices subsampling
+        const int* sample_idx_src = get_sample_indices(data_root, (int*)(uchar*)inn_buf);
+        if (is_buf_16u)
+        {
+            unsigned short* sample_idx_dst = (unsigned short*)(buf->data.s + root->buf_idx*get_length_subbuf() +
+                workVarCount*sample_count + root->offset);
+            for (i = 0; i < count; i++)
+                sample_idx_dst[i] = (unsigned short)sample_idx_src[sidx[i]];
+        }
+        else
+        {
+            int* sample_idx_dst = buf->data.i + root->buf_idx*get_length_subbuf() +
+                workVarCount*sample_count + root->offset;
+            for (i = 0; i < count; i++)
+                sample_idx_dst[i] = sample_idx_src[sidx[i]];
+        }
+    }
+
+    __END__;
+
+    cvReleaseMat( &isubsample_idx );
+    cvReleaseMat( &subsample_co );
+
+    return root;
+}
+
+
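+// gathers the (optionally subsampled) training samples back into a dense
+// count x var_count row-major float array `values`, fills an optional byte mask of
+// missing entries and, optionally, the responses (class indices or original class
+// labels for classifiers, the ordered response values otherwise)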
+void CvDTreeTrainData::get_vectors( const CvMat* _subsample_idx,
+                                    float* values, uchar* missing,
+                                    float* _responses, bool get_class_idx )
+{
+    CvMat* subsample_idx = 0;
+    CvMat* subsample_co = 0;
+
+    CV_FUNCNAME( "CvDTreeTrainData::get_vectors" );
+
+    __BEGIN__;
+
+    int i, vi, total = sample_count, count = total, cur_ofs = 0;
+    int* sidx = 0;
+    int* co = 0;
+
+    cv::AutoBuffer<uchar> inn_buf(sample_count*(2*sizeof(int) + sizeof(float)));
+    if( _subsample_idx )
+    {
+        CV_CALL( subsample_idx = cvPreprocessIndexArray( _subsample_idx, sample_count ));
+        sidx = subsample_idx->data.i;
+        CV_CALL( subsample_co = cvCreateMat( 1, sample_count*2, CV_32SC1 ));
+        co = subsample_co->data.i;
+        cvZero( subsample_co );
+        count = subsample_idx->cols + subsample_idx->rows - 1;
+        for( i = 0; i < count; i++ )
+            co[sidx[i]*2]++;
+        for( i = 0; i < total; i++ )
+        {
+            int count_i = co[i*2];
+            if( count_i )
+            {
+                co[i*2+1] = cur_ofs*var_count;
+                cur_ofs += count_i;
+            }
+        }
+    }
+
+    if( missing )
+        memset( missing, 1, count*var_count );
+
+    for( vi = 0; vi < var_count; vi++ )
+    {
+        int ci = get_var_type(vi);
+        if( ci >= 0 ) // categorical
+        {
+            float* dst = values + vi;
+            uchar* m = missing ? missing + vi : 0;
+            const int* src = get_cat_var_data(data_root, vi, (int*)(uchar*)inn_buf);
+
+            for( i = 0; i < count; i++, dst += var_count )
+            {
+                int idx = sidx ? sidx[i] : i;
+                int val = src[idx];
+                *dst = (float)val;
+                if( m )
+                {
+                    *m = (!is_buf_16u && val < 0) || (is_buf_16u && (val == 65535));
+                    m += var_count;
+                }
+            }
+        }
+        else // ordered
+        {
+            float* dst = values + vi;
+            uchar* m = missing ? missing + vi : 0;
+            int count1 = data_root->get_num_valid(vi);
+            float *src_val_buf = (float*)(uchar*)inn_buf;
+            int* src_idx_buf = (int*)(src_val_buf + sample_count);
+            int* sample_indices_buf = src_idx_buf + sample_count;
+            const float *src_val = 0;
+            const int* src_idx = 0;
+            get_ord_var_data(data_root, vi, src_val_buf, src_idx_buf, &src_val, &src_idx, sample_indices_buf);
+
+            for( i = 0; i < count1; i++ )
+            {
+                int idx = src_idx[i];
+                int count_i = 1;
+                if( co )
+                {
+                    count_i = co[idx*2];
+                    cur_ofs = co[idx*2+1];
+                }
+                else
+                    cur_ofs = idx*var_count;
+                if( count_i )
+                {
+                    float val = src_val[i];
+                    for( ; count_i > 0; count_i--, cur_ofs += var_count )
+                    {
+                        dst[cur_ofs] = val;
+                        if( m )
+                            m[cur_ofs] = 0;
+                    }
+                }
+            }
+        }
+    }
+
+    // copy responses
+    if( _responses )
+    {
+        if( is_classifier )
+        {
+            const int* src = get_class_labels(data_root, (int*)(uchar*)inn_buf);
+            for( i = 0; i < count; i++ )
+            {
+                int idx = sidx ? sidx[i] : i;
+                int val = get_class_idx ? src[idx] :
+                    cat_map->data.i[cat_ofs->data.i[cat_var_count]+src[idx]];
+                _responses[i] = (float)val;
+            }
+        }
+        else
+        {
+            float* val_buf = (float*)(uchar*)inn_buf;
+            int* sample_idx_buf = (int*)(val_buf + sample_count);
+            const float* _values = get_ord_responses(data_root, val_buf, sample_idx_buf);
+            for( i = 0; i < count; i++ )
+            {
+                int idx = sidx ? sidx[i] : i;
+                _responses[i] = _values[idx];
+            }
+        }
+    }
+
+    __END__;
+
+    cvReleaseMat( &subsample_idx );
+    cvReleaseMat( &subsample_co );
+}
+
+
+CvDTreeNode* CvDTreeTrainData::new_node( CvDTreeNode* parent, int count,
+                                         int storage_idx, int offset )
+{
+    CvDTreeNode* node = (CvDTreeNode*)cvSetNew( node_heap );
+
+    node->sample_count = count;
+    node->depth = parent ? parent->depth + 1 : 0;
+    node->parent = parent;
+    node->left = node->right = 0;
+    node->split = 0;
+    node->value = 0;
+    node->class_idx = 0;
+    node->maxlr = 0.;
+
+    node->buf_idx = storage_idx;
+    node->offset = offset;
+    if( nv_heap )
+        node->num_valid = (int*)cvSetNew( nv_heap );
+    else
+        node->num_valid = 0;
+    node->alpha = node->node_risk = node->tree_risk = node->tree_error = 0.;
+    node->complexity = 0;
+
+    if( params.cv_folds > 0 && cv_heap )
+    {
+        int cv_n = params.cv_folds;
+        node->Tn = INT_MAX;
+        node->cv_Tn = (int*)cvSetNew( cv_heap );
+        node->cv_node_risk = (double*)cvAlignPtr(node->cv_Tn + cv_n, sizeof(double));
+        node->cv_node_error = node->cv_node_risk + cv_n;
+    }
+    else
+    {
+        node->Tn = 0;
+        node->cv_Tn = 0;
+        node->cv_node_risk = 0;
+        node->cv_node_error = 0;
+    }
+
+    return node;
+}
+
+
+CvDTreeSplit* CvDTreeTrainData::new_split_ord( int vi, float cmp_val,
+                int split_point, int inversed, float quality )
+{
+    CvDTreeSplit* split = (CvDTreeSplit*)cvSetNew( split_heap );
+    split->var_idx = vi;
+    split->condensed_idx = INT_MIN;
+    split->ord.c = cmp_val;
+    split->ord.split_point = split_point;
+    split->inversed = inversed;
+    split->quality = quality;
+    split->next = 0;
+
+    return split;
+}
+
+
+CvDTreeSplit* CvDTreeTrainData::new_split_cat( int vi, float quality )
+{
+    CvDTreeSplit* split = (CvDTreeSplit*)cvSetNew( split_heap );
+    int i, n = (max_c_count + 31)/32;
+
+    split->var_idx = vi;
+    split->condensed_idx = INT_MIN;
+    split->inversed = 0;
+    split->quality = quality;
+    for( i = 0; i < n; i++ )
+        split->subset[i] = 0;
+    split->next = 0;
+
+    return split;
+}
+
+
+void CvDTreeTrainData::free_node( CvDTreeNode* node )
+{
+    CvDTreeSplit* split = node->split;
+    free_node_data( node );
+    while( split )
+    {
+        CvDTreeSplit* next = split->next;
+        cvSetRemoveByPtr( split_heap, split );
+        split = next;
+    }
+    node->split = 0;
+    cvSetRemoveByPtr( node_heap, node );
+}
+
+
+void CvDTreeTrainData::free_node_data( CvDTreeNode* node )
+{
+    if( node->num_valid )
+    {
+        cvSetRemoveByPtr( nv_heap, node->num_valid );
+        node->num_valid = 0;
+    }
+    // do not free cv_* fields, as all the cross-validation related data is released at once.
+}
+
+
+void CvDTreeTrainData::free_train_data()
+{
+    cvReleaseMat( &counts );
+    cvReleaseMat( &buf );
+    cvReleaseMat( &direction );
+    cvReleaseMat( &split_buf );
+    cvReleaseMemStorage( &temp_storage );
+    cvReleaseMat( &responses_copy );
+    cv_heap = nv_heap = 0;
+}
+
+
+void CvDTreeTrainData::clear()
+{
+    free_train_data();
+
+    cvReleaseMemStorage( &tree_storage );
+
+    cvReleaseMat( &var_idx );
+    cvReleaseMat( &var_type );
+    cvReleaseMat( &cat_count );
+    cvReleaseMat( &cat_ofs );
+    cvReleaseMat( &cat_map );
+    cvReleaseMat( &priors );
+    cvReleaseMat( &priors_mult );
+
+    node_heap = split_heap = 0;
+
+    sample_count = var_all = var_count = max_c_count = ord_var_count = cat_var_count = 0;
+    have_labels = have_priors = is_classifier = false;
+
+    buf_count = buf_size = 0;
+    shared = false;
+
+    data_root = 0;
+
+    rng = &cv::theRNG();
+}
+
+
+int CvDTreeTrainData::get_num_classes() const
+{
+    return is_classifier ? cat_count->data.i[cat_var_count] : 0;
+}
+
+
+int CvDTreeTrainData::get_var_type(int vi) const
+{
+    return var_type->data.i[vi];
+}
+
+void CvDTreeTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ord_values_buf, int* sorted_indices_buf,
+                                         const float** ord_values, const int** sorted_indices, int* sample_indices_buf )
+{
+    int vidx = var_idx ? var_idx->data.i[vi] : vi;
+    int node_sample_count = n->sample_count;
+    int td_step = train_data->step/CV_ELEM_SIZE(train_data->type);
+
+    const int* sample_indices = get_sample_indices(n, sample_indices_buf);
+
+    if( !is_buf_16u )
+        *sorted_indices = buf->data.i + n->buf_idx*get_length_subbuf() +
+        vi*sample_count + n->offset;
+    else {
+        const unsigned short* short_indices = (const unsigned short*)(buf->data.s + n->buf_idx*get_length_subbuf() +
+            vi*sample_count + n->offset );
+        for( int i = 0; i < node_sample_count; i++ )
+            sorted_indices_buf[i] = short_indices[i];
+        *sorted_indices = sorted_indices_buf;
+    }
+
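+    // gather the values of variable vi for this node's samples in pre-sorted order;
+    // the loops below stop at the first "missing" marker in the sorted index buffer
+    // (a negative index, or 65535 when a 16-bit buffer is used)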
+    if( tflag == CV_ROW_SAMPLE )
+    {
+        for( int i = 0; i < node_sample_count &&
+            ((((*sorted_indices)[i] >= 0) && !is_buf_16u) || (((*sorted_indices)[i] != 65535) && is_buf_16u)); i++ )
+        {
+            int idx = (*sorted_indices)[i];
+            idx = sample_indices[idx];
+            ord_values_buf[i] = *(train_data->data.fl + idx * td_step + vidx);
+        }
+    }
+    else
+        for( int i = 0; i < node_sample_count &&
+            ((((*sorted_indices)[i] >= 0) && !is_buf_16u) || (((*sorted_indices)[i] != 65535) && is_buf_16u)); i++ )
+        {
+            int idx = (*sorted_indices)[i];
+            idx = sample_indices[idx];
+            ord_values_buf[i] = *(train_data->data.fl + vidx* td_step + idx);
+        }
+
+    *ord_values = ord_values_buf;
+}
+
+
+const int* CvDTreeTrainData::get_class_labels( CvDTreeNode* n, int* labels_buf )
+{
+    if (is_classifier)
+        return get_cat_var_data( n, var_count, labels_buf);
+    return 0;
+}
+
+const int* CvDTreeTrainData::get_sample_indices( CvDTreeNode* n, int* indices_buf )
+{
+    return get_cat_var_data( n, get_work_var_count(), indices_buf );
+}
+
+const float* CvDTreeTrainData::get_ord_responses( CvDTreeNode* n, float* values_buf, int*sample_indices_buf )
+{
+    int _sample_count = n->sample_count;
+    int r_step = CV_IS_MAT_CONT(responses->type) ? 1 : responses->step/CV_ELEM_SIZE(responses->type);
+    const int* indices = get_sample_indices(n, sample_indices_buf);
+
+    for( int i = 0; i < _sample_count &&
+        (((indices[i] >= 0) && !is_buf_16u) || ((indices[i] != 65535) && is_buf_16u)); i++ )
+    {
+        int idx = indices[i];
+        values_buf[i] = *(responses->data.fl + idx * r_step);
+    }
+
+    return values_buf;
+}
+
+
+const int* CvDTreeTrainData::get_cv_labels( CvDTreeNode* n, int* labels_buf )
+{
+    if (have_labels)
+        return get_cat_var_data( n, get_work_var_count()- 1, labels_buf);
+    return 0;
+}
+
+
+const int* CvDTreeTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* cat_values_buf)
+{
+    const int* cat_values = 0;
+    if( !is_buf_16u )
+        cat_values = buf->data.i + n->buf_idx*get_length_subbuf() +
+            vi*sample_count + n->offset;
+    else {
+        const unsigned short* short_values = (const unsigned short*)(buf->data.s + n->buf_idx*get_length_subbuf() +
+            vi*sample_count + n->offset);
+        for( int i = 0; i < n->sample_count; i++ )
+            cat_values_buf[i] = short_values[i];
+        cat_values = cat_values_buf;
+    }
+    return cat_values;
+}
+
+
+int CvDTreeTrainData::get_child_buf_idx( CvDTreeNode* n )
+{
+    int idx = n->buf_idx + 1;
+    if( idx >= buf_count )
+        idx = shared ? 1 : 0;
+    return idx;
+}
+
+
+void CvDTreeTrainData::write_params( CvFileStorage* fs ) const
+{
+    CV_FUNCNAME( "CvDTreeTrainData::write_params" );
+
+    __BEGIN__;
+
+    int vi, vcount = var_count;
+
+    cvWriteInt( fs, "is_classifier", is_classifier ? 1 : 0 );
+    cvWriteInt( fs, "var_all", var_all );
+    cvWriteInt( fs, "var_count", var_count );
+    cvWriteInt( fs, "ord_var_count", ord_var_count );
+    cvWriteInt( fs, "cat_var_count", cat_var_count );
+
+    cvStartWriteStruct( fs, "training_params", CV_NODE_MAP );
+    cvWriteInt( fs, "use_surrogates", params.use_surrogates ? 1 : 0 );
+
+    if( is_classifier )
+    {
+        cvWriteInt( fs, "max_categories", params.max_categories );
+    }
+    else
+    {
+        cvWriteReal( fs, "regression_accuracy", params.regression_accuracy );
+    }
+
+    cvWriteInt( fs, "max_depth", params.max_depth );
+    cvWriteInt( fs, "min_sample_count", params.min_sample_count );
+    cvWriteInt( fs, "cross_validation_folds", params.cv_folds );
+
+    if( params.cv_folds > 1 )
+    {
+        cvWriteInt( fs, "use_1se_rule", params.use_1se_rule ? 1 : 0 );
+        cvWriteInt( fs, "truncate_pruned_tree", params.truncate_pruned_tree ? 1 : 0 );
+    }
+
+    if( priors )
+        cvWrite( fs, "priors", priors );
+
+    cvEndWriteStruct( fs );
+
+    if( var_idx )
+        cvWrite( fs, "var_idx", var_idx );
+
+    cvStartWriteStruct( fs, "var_type", CV_NODE_SEQ+CV_NODE_FLOW );
+
+    for( vi = 0; vi < vcount; vi++ )
+        cvWriteInt( fs, 0, var_type->data.i[vi] >= 0 );
+
+    cvEndWriteStruct( fs );
+
+    if( cat_count && (cat_var_count > 0 || is_classifier) )
+    {
+        CV_ASSERT( cat_count != 0 );
+        cvWrite( fs, "cat_count", cat_count );
+        cvWrite( fs, "cat_map", cat_map );
+    }
+
+    __END__;
+}
+
+
+void CvDTreeTrainData::read_params( CvFileStorage* fs, CvFileNode* node )
+{
+    CV_FUNCNAME( "CvDTreeTrainData::read_params" );
+
+    __BEGIN__;
+
+    CvFileNode *tparams_node, *vartype_node;
+    CvSeqReader reader;
+    int vi, max_split_size, tree_block_size;
+
+    is_classifier = (cvReadIntByName( fs, node, "is_classifier" ) != 0);
+    var_all = cvReadIntByName( fs, node, "var_all" );
+    var_count = cvReadIntByName( fs, node, "var_count", var_all );
+    cat_var_count = cvReadIntByName( fs, node, "cat_var_count" );
+    ord_var_count = cvReadIntByName( fs, node, "ord_var_count" );
+
+    tparams_node = cvGetFileNodeByName( fs, node, "training_params" );
+
+    if( tparams_node ) // training parameters are not necessary
+    {
+        params.use_surrogates = cvReadIntByName( fs, tparams_node, "use_surrogates", 1 ) != 0;
+
+        if( is_classifier )
+        {
+            params.max_categories = cvReadIntByName( fs, tparams_node, "max_categories" );
+        }
+        else
+        {
+            params.regression_accuracy =
+                (float)cvReadRealByName( fs, tparams_node, "regression_accuracy" );
+        }
+
+        params.max_depth = cvReadIntByName( fs, tparams_node, "max_depth" );
+        params.min_sample_count = cvReadIntByName( fs, tparams_node, "min_sample_count" );
+        params.cv_folds = cvReadIntByName( fs, tparams_node, "cross_validation_folds" );
+
+        if( params.cv_folds > 1 )
+        {
+            params.use_1se_rule = cvReadIntByName( fs, tparams_node, "use_1se_rule" ) != 0;
+            params.truncate_pruned_tree =
+                cvReadIntByName( fs, tparams_node, "truncate_pruned_tree" ) != 0;
+        }
+
+        priors = (CvMat*)cvReadByName( fs, tparams_node, "priors" );
+        if( priors )
+        {
+            if( !CV_IS_MAT(priors) )
+                CV_ERROR( CV_StsParseError, "priors must be stored as a matrix" );
+            priors_mult = cvCloneMat( priors );
+        }
+    }
+
+    CV_CALL( var_idx = (CvMat*)cvReadByName( fs, node, "var_idx" ));
+    if( var_idx )
+    {
+        if( !CV_IS_MAT(var_idx) ||
+            (var_idx->cols != 1 && var_idx->rows != 1) ||
+            var_idx->cols + var_idx->rows - 1 != var_count ||
+            CV_MAT_TYPE(var_idx->type) != CV_32SC1 )
+            CV_ERROR( CV_StsParseError,
+                "var_idx (if it exists) must be a valid 1d integer vector containing <var_count> elements" );
+
+        for( vi = 0; vi < var_count; vi++ )
+            if( (unsigned)var_idx->data.i[vi] >= (unsigned)var_all )
+                CV_ERROR( CV_StsOutOfRange, "some of var_idx elements are out of range" );
+    }
+
+    ////// read var type
+    CV_CALL( var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 ));
+
+    cat_var_count = 0;
+    ord_var_count = -1;
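+    // var_type encodes categorical variables as consecutive non-negative indices
+    // (used to address cat_count/cat_ofs) and ordered variables as negative codes;
+    // ord_var_count starts at -1, is decremented once per ordered variable and is
+    // bit-inverted below, which yields the number of ordered variables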
+    vartype_node = cvGetFileNodeByName( fs, node, "var_type" );
+
+    if( vartype_node && CV_NODE_TYPE(vartype_node->tag) == CV_NODE_INT && var_count == 1 )
+        var_type->data.i[0] = vartype_node->data.i ? cat_var_count++ : ord_var_count--;
+    else
+    {
+        if( !vartype_node || CV_NODE_TYPE(vartype_node->tag) != CV_NODE_SEQ ||
+            vartype_node->data.seq->total != var_count )
+            CV_ERROR( CV_StsParseError, "var_type must exist and be a sequence of 0's and 1's" );
+
+        cvStartReadSeq( vartype_node->data.seq, &reader );
+
+        for( vi = 0; vi < var_count; vi++ )
+        {
+            CvFileNode* n = (CvFileNode*)reader.ptr;
+            if( CV_NODE_TYPE(n->tag) != CV_NODE_INT || (n->data.i & ~1) )
+                CV_ERROR( CV_StsParseError, "var_type must exist and be a sequence of 0's and 1's" );
+            var_type->data.i[vi] = n->data.i ? cat_var_count++ : ord_var_count--;
+            CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+        }
+    }
+    var_type->data.i[var_count] = cat_var_count;
+
+    ord_var_count = ~ord_var_count;
+    //////
+
+    if( cat_var_count > 0 || is_classifier )
+    {
+        int ccount, total_c_count = 0;
+        CV_CALL( cat_count = (CvMat*)cvReadByName( fs, node, "cat_count" ));
+        CV_CALL( cat_map = (CvMat*)cvReadByName( fs, node, "cat_map" ));
+
+        if( !CV_IS_MAT(cat_count) || !CV_IS_MAT(cat_map) ||
+            (cat_count->cols != 1 && cat_count->rows != 1) ||
+            CV_MAT_TYPE(cat_count->type) != CV_32SC1 ||
+            cat_count->cols + cat_count->rows - 1 != cat_var_count + is_classifier ||
+            (cat_map->cols != 1 && cat_map->rows != 1) ||
+            CV_MAT_TYPE(cat_map->type) != CV_32SC1 )
+            CV_ERROR( CV_StsParseError,
+            "Both cat_count and cat_map must exist and be valid 1d integer vectors of an appropriate size" );
+
+        ccount = cat_var_count + is_classifier;
+
+        CV_CALL( cat_ofs = cvCreateMat( 1, ccount + 1, CV_32SC1 ));
+        cat_ofs->data.i[0] = 0;
+        max_c_count = 1;
+
+        for( vi = 0; vi < ccount; vi++ )
+        {
+            int val = cat_count->data.i[vi];
+            if( val <= 0 )
+                CV_ERROR( CV_StsOutOfRange, "some of cat_count elements are out of range" );
+            max_c_count = MAX( max_c_count, val );
+            cat_ofs->data.i[vi+1] = total_c_count += val;
+        }
+
+        if( cat_map->cols + cat_map->rows - 1 != total_c_count )
+            CV_ERROR( CV_StsBadSize,
+            "cat_map vector length is not equal to the total number of categories in all categorical vars" );
+    }
+
+    max_split_size = cvAlign(sizeof(CvDTreeSplit) +
+        (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
+
+    tree_block_size = MAX((int)sizeof(CvDTreeNode)*8, max_split_size);
+    tree_block_size = MAX(tree_block_size + block_size_delta, min_block_size);
+    CV_CALL( tree_storage = cvCreateMemStorage( tree_block_size ));
+    CV_CALL( node_heap = cvCreateSet( 0, sizeof(node_heap[0]),
+            sizeof(CvDTreeNode), tree_storage ));
+    CV_CALL( split_heap = cvCreateSet( 0, sizeof(split_heap[0]),
+            max_split_size, tree_storage ));
+
+    __END__;
+}
+
+/////////////////////// Decision Tree /////////////////////////
+CvDTreeParams::CvDTreeParams() : max_categories(10), max_depth(INT_MAX), min_sample_count(10),
+    cv_folds(10), use_surrogates(true), use_1se_rule(true),
+    truncate_pruned_tree(true), regression_accuracy(0.01f), priors(0)
+{}
+
+CvDTreeParams::CvDTreeParams( int _max_depth, int _min_sample_count,
+                              float _regression_accuracy, bool _use_surrogates,
+                              int _max_categories, int _cv_folds,
+                              bool _use_1se_rule, bool _truncate_pruned_tree,
+                              const float* _priors ) :
+    max_categories(_max_categories), max_depth(_max_depth),
+    min_sample_count(_min_sample_count), cv_folds (_cv_folds),
+    use_surrogates(_use_surrogates), use_1se_rule(_use_1se_rule),
+    truncate_pruned_tree(_truncate_pruned_tree),
+    regression_accuracy(_regression_accuracy),
+    priors(_priors)
+{}
+
+CvDTree::CvDTree()
+{
+    data = 0;
+    var_importance = 0;
+    default_model_name = "my_tree";
+
+    clear();
+}
+
+
+void CvDTree::clear()
+{
+    cvReleaseMat( &var_importance );
+    if( data )
+    {
+        if( !data->shared )
+            delete data;
+        else
+            free_tree();
+        data = 0;
+    }
+    root = 0;
+    pruned_tree_idx = -1;
+}
+
+
+CvDTree::~CvDTree()
+{
+    clear();
+}
+
+
+const CvDTreeNode* CvDTree::get_root() const
+{
+    return root;
+}
+
+
+int CvDTree::get_pruned_tree_idx() const
+{
+    return pruned_tree_idx;
+}
+
+
+CvDTreeTrainData* CvDTree::get_data()
+{
+    return data;
+}
+
+
+bool CvDTree::train( const CvMat* _train_data, int _tflag,
+                     const CvMat* _responses, const CvMat* _var_idx,
+                     const CvMat* _sample_idx, const CvMat* _var_type,
+                     const CvMat* _missing_mask, CvDTreeParams _params )
+{
+    bool result = false;
+
+    CV_FUNCNAME( "CvDTree::train" );
+
+    __BEGIN__;
+
+    clear();
+    data = new CvDTreeTrainData( _train_data, _tflag, _responses,
+                                 _var_idx, _sample_idx, _var_type,
+                                 _missing_mask, _params, false );
+    CV_CALL( result = do_train(0) );
+
+    __END__;
+
+    return result;
+}
+
+bool CvDTree::train( const Mat& _train_data, int _tflag,
+                    const Mat& _responses, const Mat& _var_idx,
+                    const Mat& _sample_idx, const Mat& _var_type,
+                    const Mat& _missing_mask, CvDTreeParams _params )
+{
+    train_data_hdr = _train_data;
+    train_data_mat = _train_data;
+    responses_hdr = _responses;
+    responses_mat = _responses;
+
+    CvMat vidx=_var_idx, sidx=_sample_idx, vtype=_var_type, mmask=_missing_mask;
+
+    return train(&train_data_hdr, _tflag, &responses_hdr, vidx.data.ptr ? &vidx : 0, sidx.data.ptr ? &sidx : 0,
+                 vtype.data.ptr ? &vtype : 0, mmask.data.ptr ? &mmask : 0, _params);
+}
+
+
+bool CvDTree::train( CvMLData* _data, CvDTreeParams _params )
+{
+    bool result = false;
+
+    CV_FUNCNAME( "CvDTree::train" );
+
+    __BEGIN__;
+
+    const CvMat* values = _data->get_values();
+    const CvMat* response = _data->get_responses();
+    const CvMat* missing = _data->get_missing();
+    const CvMat* var_types = _data->get_var_types();
+    const CvMat* train_sidx = _data->get_train_sample_idx();
+    const CvMat* var_idx = _data->get_var_idx();
+
+    CV_CALL( result = train( values, CV_ROW_SAMPLE, response, var_idx,
+        train_sidx, var_types, missing, _params ) );
+
+    __END__;
+
+    return result;
+}
+
+bool CvDTree::train( CvDTreeTrainData* _data, const CvMat* _subsample_idx )
+{
+    bool result = false;
+
+    CV_FUNCNAME( "CvDTree::train" );
+
+    __BEGIN__;
+
+    clear();
+    data = _data;
+    data->shared = true;
+    CV_CALL( result = do_train(_subsample_idx));
+
+    __END__;
+
+    return result;
+}
+
+
+bool CvDTree::do_train( const CvMat* _subsample_idx )
+{
+    bool result = false;
+
+    CV_FUNCNAME( "CvDTree::do_train" );
+
+    __BEGIN__;
+
+    root = data->subsample_data( _subsample_idx );
+
+    CV_CALL( try_split_node(root));
+
+    if( root->split )
+    {
+        CV_Assert( root->left );
+        CV_Assert( root->right );
+
+        if( data->params.cv_folds > 0 )
+            CV_CALL( prune_cv() );
+
+        if( !data->shared )
+            data->free_train_data();
+
+        result = true;
+    }
+
+    __END__;
+
+    return result;
+}
+
+
+void CvDTree::try_split_node( CvDTreeNode* node )
+{
+    CvDTreeSplit* best_split = 0;
+    int i, n = node->sample_count, vi;
+    bool can_split = true;
+    double quality_scale;
+
+    calc_node_value( node );
+
+    if( node->sample_count <= data->params.min_sample_count ||
+        node->depth >= data->params.max_depth )
+        can_split = false;
+
+    if( can_split && data->is_classifier )
+    {
+        // check if we have a "pure" node,
+        // we assume that cls_count is filled by calc_node_value()
+        int* cls_count = data->counts->data.i;
+        int nz = 0, m = data->get_num_classes();
+        for( i = 0; i < m; i++ )
+            nz += cls_count[i] != 0;
+        if( nz == 1 ) // there is only one class
+            can_split = false;
+    }
+    else if( can_split )
+    {
+        if( sqrt(node->node_risk)/n < data->params.regression_accuracy )
+            can_split = false;
+    }
+
+    if( can_split )
+    {
+        best_split = find_best_split(node);
+        // TODO: check the split quality ...
+        node->split = best_split;
+    }
+    if( !can_split || !best_split )
+    {
+        data->free_node_data(node);
+        return;
+    }
+
+    quality_scale = calc_node_dir( node );
+    if( data->params.use_surrogates )
+    {
+        // find all the surrogate splits
+        // and sort them by their similarity to the primary one
+        for( vi = 0; vi < data->var_count; vi++ )
+        {
+            CvDTreeSplit* split;
+            int ci = data->get_var_type(vi);
+
+            if( vi == best_split->var_idx )
+                continue;
+
+            if( ci >= 0 )
+                split = find_surrogate_split_cat( node, vi );
+            else
+                split = find_surrogate_split_ord( node, vi );
+
+            if( split )
+            {
+                // insert the split
+                CvDTreeSplit* prev_split = node->split;
+                split->quality = (float)(split->quality*quality_scale);
+
+                while( prev_split->next &&
+                       prev_split->next->quality > split->quality )
+                    prev_split = prev_split->next;
+                split->next = prev_split->next;
+                prev_split->next = split;
+            }
+        }
+    }
+    split_node_data( node );
+    try_split_node( node->left );
+    try_split_node( node->right );
+}
+
+
+// calculate the direction (left(-1), right(1), missing(0))
+// for each sample using the best split.
+// the function returns the scale coefficient for surrogate split quality factors.
+// the scale is applied to normalize surrogate split quality relative to the
+// best (primary) split quality. That is, if a surrogate split is absolutely
+// identical to the primary split, its quality will be set to the maximum value =
+// quality of the primary split; otherwise, it will be lower.
+// besides, the function computes node->maxlr,
+// the minimum acceptable quality (w/o considering the above mentioned scale)
+// for a surrogate split; surrogate splits whose quality does not exceed
+// node->maxlr are discarded.
+double CvDTree::calc_node_dir( CvDTreeNode* node )
+{
+    char* dir = (char*)data->direction->data.ptr;
+    int i, n = node->sample_count, vi = node->split->var_idx;
+    double L, R;
+
+    assert( !node->split->inversed );
+
+    if( data->get_var_type(vi) >= 0 ) // split on categorical var
+    {
+        cv::AutoBuffer<int> inn_buf(n*(!data->have_priors ? 1 : 2));
+        int* labels_buf = (int*)inn_buf;
+        const int* labels = data->get_cat_var_data( node, vi, labels_buf );
+        const int* subset = node->split->subset;
+        if( !data->have_priors )
+        {
+            int sum = 0, sum_abs = 0;
+
+            for( i = 0; i < n; i++ )
+            {
+                int idx = labels[i];
+                int d = ( ((idx >= 0)&&(!data->is_buf_16u)) || ((idx != 65535)&&(data->is_buf_16u)) ) ?
+                    CV_DTREE_CAT_DIR(idx,subset) : 0;
+                sum += d; sum_abs += d & 1;
+                dir[i] = (char)d;
+            }
+
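+            // each d is -1 (left), +1 (right) or 0 (missing) and (d & 1) is 1 for any
+            // non-zero d, so sum_abs = L + R and sum = R - L; hence: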
+            R = (sum_abs + sum) >> 1;
+            L = (sum_abs - sum) >> 1;
+        }
+        else
+        {
+            const double* priors = data->priors_mult->data.db;
+            double sum = 0, sum_abs = 0;
+            int* responses_buf = labels_buf + n;
+            const int* responses = data->get_class_labels(node, responses_buf);
+
+            for( i = 0; i < n; i++ )
+            {
+                int idx = labels[i];
+                double w = priors[responses[i]];
+                int d = idx >= 0 ? CV_DTREE_CAT_DIR(idx,subset) : 0;
+                sum += d*w; sum_abs += (d & 1)*w;
+                dir[i] = (char)d;
+            }
+
+            R = (sum_abs + sum) * 0.5;
+            L = (sum_abs - sum) * 0.5;
+        }
+    }
+    else // split on ordered var
+    {
+        int split_point = node->split->ord.split_point;
+        int n1 = node->get_num_valid(vi);
+        cv::AutoBuffer<uchar> inn_buf(n*(sizeof(int)*(data->have_priors ? 3 : 2) + sizeof(float)));
+        float* val_buf = (float*)(uchar*)inn_buf;
+        int* sorted_buf = (int*)(val_buf + n);
+        int* sample_idx_buf = sorted_buf + n;
+        const float* val = 0;
+        const int* sorted = 0;
+        data->get_ord_var_data( node, vi, val_buf, sorted_buf, &val, &sorted, sample_idx_buf);
+
+        assert( 0 <= split_point && split_point < n1-1 );
+
+        if( !data->have_priors )
+        {
+            for( i = 0; i <= split_point; i++ )
+                dir[sorted[i]] = (char)-1;
+            for( ; i < n1; i++ )
+                dir[sorted[i]] = (char)1;
+            for( ; i < n; i++ )
+                dir[sorted[i]] = (char)0;
+
+            L = split_point-1;
+            R = n1 - split_point + 1;
+        }
+        else
+        {
+            const double* priors = data->priors_mult->data.db;
+            int* responses_buf = sample_idx_buf + n;
+            const int* responses = data->get_class_labels(node, responses_buf);
+            L = R = 0;
+
+            for( i = 0; i <= split_point; i++ )
+            {
+                int idx = sorted[i];
+                double w = priors[responses[idx]];
+                dir[idx] = (char)-1;
+                L += w;
+            }
+
+            for( ; i < n1; i++ )
+            {
+                int idx = sorted[i];
+                double w = priors[responses[idx]];
+                dir[idx] = (char)1;
+                R += w;
+            }
+
+            for( ; i < n; i++ )
+                dir[sorted[i]] = (char)0;
+        }
+    }
+    node->maxlr = MAX( L, R );
+    return node->split->quality/(L + R);
+}
+
+
+namespace cv
+{
+
+template<> CV_EXPORTS void DefaultDeleter<CvDTreeSplit>::operator ()(CvDTreeSplit* obj) const
+{
+    fastFree(obj);
+}
+
+DTreeBestSplitFinder::DTreeBestSplitFinder( CvDTree* _tree, CvDTreeNode* _node)
+{
+    tree = _tree;
+    node = _node;
+    splitSize = tree->get_data()->split_heap->elem_size;
+
+    bestSplit.reset((CvDTreeSplit*)fastMalloc(splitSize));
+    memset(bestSplit.get(), 0, splitSize);
+    bestSplit->quality = -1;
+    bestSplit->condensed_idx = INT_MIN;
+    split.reset((CvDTreeSplit*)fastMalloc(splitSize));
+    memset(split.get(), 0, splitSize);
+    //haveSplit = false;
+}
+
+DTreeBestSplitFinder::DTreeBestSplitFinder( const DTreeBestSplitFinder& finder, Split )
+{
+    tree = finder.tree;
+    node = finder.node;
+    splitSize = tree->get_data()->split_heap->elem_size;
+
+    bestSplit.reset((CvDTreeSplit*)fastMalloc(splitSize));
+    memcpy(bestSplit.get(), finder.bestSplit.get(), splitSize);
+    split.reset((CvDTreeSplit*)fastMalloc(splitSize));
+    memset(split.get(), 0, splitSize);
+}
+
+void DTreeBestSplitFinder::operator()(const BlockedRange& range)
+{
+    int vi, vi1 = range.begin(), vi2 = range.end();
+    int n = node->sample_count;
+    CvDTreeTrainData* data = tree->get_data();
+    AutoBuffer<uchar> inn_buf(2*n*(sizeof(int) + sizeof(float)));
+
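+    // body of the parallel reduction over variables: each worker scans its range
+    // [vi1, vi2), calling the per-variable split search and keeping the best split
+    // found so far in bestSplit; join() below merges the per-worker results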
+    for( vi = vi1; vi < vi2; vi++ )
+    {
+        CvDTreeSplit *res;
+        int ci = data->get_var_type(vi);
+        if( node->get_num_valid(vi) <= 1 )
+            continue;
+
+        if( data->is_classifier )
+        {
+            if( ci >= 0 )
+                res = tree->find_split_cat_class( node, vi, bestSplit->quality, split, (uchar*)inn_buf );
+            else
+                res = tree->find_split_ord_class( node, vi, bestSplit->quality, split, (uchar*)inn_buf );
+        }
+        else
+        {
+            if( ci >= 0 )
+                res = tree->find_split_cat_reg( node, vi, bestSplit->quality, split, (uchar*)inn_buf );
+            else
+                res = tree->find_split_ord_reg( node, vi, bestSplit->quality, split, (uchar*)inn_buf );
+        }
+
+        if( res && bestSplit->quality < split->quality )
+                memcpy( bestSplit.get(), split.get(), splitSize );
+    }
+}
+
+void DTreeBestSplitFinder::join( DTreeBestSplitFinder& rhs )
+{
+    if( bestSplit->quality < rhs.bestSplit->quality )
+        memcpy( bestSplit.get(), rhs.bestSplit.get(), splitSize );
+}
+}
+
+
+CvDTreeSplit* CvDTree::find_best_split( CvDTreeNode* node )
+{
+    DTreeBestSplitFinder finder( this, node );
+
+    cv::parallel_reduce(cv::BlockedRange(0, data->var_count), finder);
+
+    CvDTreeSplit *bestSplit = 0;
+    if( finder.bestSplit->quality > 0 )
+    {
+        bestSplit = data->new_split_cat( 0, -1.0f );
+        memcpy( bestSplit, finder.bestSplit, finder.splitSize );
+    }
+
+    return bestSplit;
+}
+
+CvDTreeSplit* CvDTree::find_split_ord_class( CvDTreeNode* node, int vi,
+                                             float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+    int n = node->sample_count;
+    int n1 = node->get_num_valid(vi);
+    int m = data->get_num_classes();
+
+    int base_size = 2*m*sizeof(int);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+      inn_buf.allocate(base_size + n*(3*sizeof(int)+sizeof(float)));
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+    float* values_buf = (float*)ext_buf;
+    int* sorted_indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = sorted_indices_buf + n;
+    const float* values = 0;
+    const int* sorted_indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values,
+                            &sorted_indices, sample_indices_buf );
+    int* responses_buf =  sample_indices_buf + n;
+    const int* responses = data->get_class_labels( node, responses_buf );
+
+    const int* rc0 = data->counts->data.i;
+    int* lc = (int*)base_buf;
+    int* rc = lc + m;
+    int i, best_i = -1;
+    double lsum2 = 0, rsum2 = 0, best_val = init_quality;
+    const double* priors = data->have_priors ? data->priors_mult->data.db : 0;
+
+    // init arrays of class instance counters on both sides of the split
+    for( i = 0; i < m; i++ )
+    {
+        lc[i] = 0;
+        rc[i] = rc0[i];
+    }
+
+    // compensate for missing values
+    for( i = n1; i < n; i++ )
+    {
+        rc[responses[sorted_indices[i]]]--;
+    }
+
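+    // both branches below maximize lsum2/L + rsum2/R, written with a common denominator
+    // as (lsum2*R + rsum2*L)/(L*R), where lsum2/rsum2 are the sums of squared
+    // (prior-weighted) class counts on each side; this is equivalent to minimizing a
+    // Gini-type impurity of the two children. The sums are updated incrementally
+    // using (x +/- 1)^2 = x^2 +/- 2x + 1.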
+    if( !priors )
+    {
+        int L = 0, R = n1;
+
+        for( i = 0; i < m; i++ )
+            rsum2 += (double)rc[i]*rc[i];
+
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int idx = responses[sorted_indices[i]];
+            int lv, rv;
+            L++; R--;
+            lv = lc[idx]; rv = rc[idx];
+            lsum2 += lv*2 + 1;
+            rsum2 -= rv*2 - 1;
+            lc[idx] = lv + 1; rc[idx] = rv - 1;
+
+            if( values[i] + epsilon < values[i+1] )
+            {
+                double val = (lsum2*R + rsum2*L)/((double)L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_i = i;
+                }
+            }
+        }
+    }
+    else
+    {
+        double L = 0, R = 0;
+        for( i = 0; i < m; i++ )
+        {
+            double wv = rc[i]*priors[i];
+            R += wv;
+            rsum2 += wv*wv;
+        }
+
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int idx = responses[sorted_indices[i]];
+            int lv, rv;
+            double p = priors[idx], p2 = p*p;
+            L += p; R -= p;
+            lv = lc[idx]; rv = rc[idx];
+            lsum2 += p2*(lv*2 + 1);
+            rsum2 -= p2*(rv*2 - 1);
+            lc[idx] = lv + 1; rc[idx] = rv - 1;
+
+            if( values[i] + epsilon < values[i+1] )
+            {
+                double val = (lsum2*R + rsum2*L)/((double)L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_i = i;
+                }
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_i >= 0 )
+    {
+        split = _split ? _split : data->new_split_ord( 0, 0.0f, 0, 0, 0.0f );
+        split->var_idx = vi;
+        split->ord.c = (values[best_i] + values[best_i+1])*0.5f;
+        split->ord.split_point = best_i;
+        split->inversed = 0;
+        split->quality = (float)best_val;
+    }
+    return split;
+}
+
+
+void CvDTree::cluster_categories( const int* vectors, int n, int m,
+                                int* csums, int k, int* labels )
+{
+    // TODO: consider adding priors (class weights) and sample weights to the clustering algorithm
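+    // in effect this is a small k-means over the n category rows (class-count vectors)
+    // of the contingency table: each row and each cluster sum is normalized by its
+    // total count (v_weights/c_weights), and every category is reassigned to the
+    // nearest center until the labels stop changing or max_iters is reached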
+    int iters = 0, max_iters = 100;
+    int i, j, idx;
+    cv::AutoBuffer<double> buf(n + k);
+    double *v_weights = buf, *c_weights = buf + n;
+    bool modified = true;
+    RNG* r = data->rng;
+
+    // assign labels randomly
+    for( i = 0; i < n; i++ )
+    {
+        int sum = 0;
+        const int* v = vectors + i*m;
+        labels[i] = i < k ? i : r->uniform(0, k);
+
+        // compute weight of each vector
+        for( j = 0; j < m; j++ )
+            sum += v[j];
+        v_weights[i] = sum ? 1./sum : 0.;
+    }
+
+    for( i = 0; i < n; i++ )
+    {
+        int i1 = (*r)(n);
+        int i2 = (*r)(n);
+        CV_SWAP( labels[i1], labels[i2], j );
+    }
+
+    for( iters = 0; iters <= max_iters; iters++ )
+    {
+        // calculate csums
+        for( i = 0; i < k; i++ )
+        {
+            for( j = 0; j < m; j++ )
+                csums[i*m + j] = 0;
+        }
+
+        for( i = 0; i < n; i++ )
+        {
+            const int* v = vectors + i*m;
+            int* s = csums + labels[i]*m;
+            for( j = 0; j < m; j++ )
+                s[j] += v[j];
+        }
+
+        // exit the loop here, when we have up-to-date csums
+        if( iters == max_iters || !modified )
+            break;
+
+        modified = false;
+
+        // calculate weight of each cluster
+        for( i = 0; i < k; i++ )
+        {
+            const int* s = csums + i*m;
+            int sum = 0;
+            for( j = 0; j < m; j++ )
+                sum += s[j];
+            c_weights[i] = sum ? 1./sum : 0;
+        }
+
+        // now for each vector determine the closest cluster
+        for( i = 0; i < n; i++ )
+        {
+            const int* v = vectors + i*m;
+            double alpha = v_weights[i];
+            double min_dist2 = DBL_MAX;
+            int min_idx = -1;
+
+            for( idx = 0; idx < k; idx++ )
+            {
+                const int* s = csums + idx*m;
+                double dist2 = 0., beta = c_weights[idx];
+                for( j = 0; j < m; j++ )
+                {
+                    double t = v[j]*alpha - s[j]*beta;
+                    dist2 += t*t;
+                }
+                if( min_dist2 > dist2 )
+                {
+                    min_dist2 = dist2;
+                    min_idx = idx;
+                }
+            }
+
+            if( min_idx != labels[i] )
+                modified = true;
+            labels[i] = min_idx;
+        }
+    }
+}
+
+
+CvDTreeSplit* CvDTree::find_split_cat_class( CvDTreeNode* node, int vi, float init_quality,
+                                             CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    int ci = data->get_var_type(vi);
+    int n = node->sample_count;
+    int m = data->get_num_classes();
+    int _mi = data->cat_count->data.i[ci], mi = _mi;
+
+    int base_size = m*(3 + mi)*sizeof(int) + (mi+1)*sizeof(double);
+    if( m > 2 && mi > data->params.max_categories )
+        base_size += (m*std::min(data->params.max_categories, n) + mi)*sizeof(int);
+    else
+        base_size += mi*sizeof(int*);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+        inn_buf.allocate(base_size + 2*n*sizeof(int));
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+
+    int* lc = (int*)base_buf;
+    int* rc = lc + m;
+    int* _cjk = rc + m*2, *cjk = _cjk;
+    double* c_weights = (double*)alignPtr(cjk + m*mi, sizeof(double));
+
+    int* labels_buf = (int*)ext_buf;
+    const int* labels = data->get_cat_var_data(node, vi, labels_buf);
+    int* responses_buf = labels_buf + n;
+    const int* responses = data->get_class_labels(node, responses_buf);
+
+    int* cluster_labels = 0;
+    int** int_ptr = 0;
+    int i, j, k, idx;
+    double L = 0, R = 0;
+    double best_val = init_quality;
+    int prevcode = 0, best_subset = -1, subset_i, subset_n, subtract = 0;
+    const double* priors = data->priors_mult->data.db;
+
+    // init array of counters:
+    // c_{jk} - number of samples that have vi-th input variable = j and response = k.
+    for( j = -1; j < mi; j++ )
+        for( k = 0; k < m; k++ )
+            cjk[j*m + k] = 0;
+
+    for( i = 0; i < n; i++ )
+    {
+       j = ( labels[i] == 65535 && data->is_buf_16u) ? -1 : labels[i];
+       k = responses[i];
+       cjk[j*m + k]++;
+    }
+
+    if( m > 2 )
+    {
+        if( mi > data->params.max_categories )
+        {
+            mi = MIN(data->params.max_categories, n);
+            cjk = (int*)(c_weights + _mi);
+            cluster_labels = cjk + m*mi;
+            cluster_categories( _cjk, _mi, m, cjk, mi, cluster_labels );
+        }
+        subset_i = 1;
+        subset_n = 1 << mi;
+    }
+    else
+    {
+        assert( m == 2 );
+        int_ptr = (int**)(c_weights + _mi);
+        for( j = 0; j < mi; j++ )
+            int_ptr[j] = cjk + j*2 + 1;
+        std::sort(int_ptr, int_ptr + mi, LessThanPtr<int>());
+        subset_i = 0;
+        subset_n = mi;
+    }
+
+    for( k = 0; k < m; k++ )
+    {
+        int sum = 0;
+        for( j = 0; j < mi; j++ )
+            sum += cjk[j*m + k];
+        rc[k] = sum;
+        lc[k] = 0;
+    }
+
+    for( j = 0; j < mi; j++ )
+    {
+        double sum = 0;
+        for( k = 0; k < m; k++ )
+            sum += cjk[j*m + k]*priors[k];
+        c_weights[j] = sum;
+        R += c_weights[j];
+    }
+
+    for( ; subset_i < subset_n; subset_i++ )
+    {
+        double weight;
+        int* crow;
+        double lsum2 = 0, rsum2 = 0;
+
+        if( m == 2 )
+            idx = (int)(int_ptr[subset_i] - cjk)/2;
+        else
+        {
+            int graycode = (subset_i>>1)^subset_i;
+            int diff = graycode ^ prevcode;
+
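+            // subsets are enumerated in Gray-code order, so consecutive subsets differ
+            // in exactly one category; only that category's counts have to be moved
+            // between lc[] and rc[] below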
+            // determine index of the changed bit.
+            Cv32suf u;
+            idx = diff >= (1 << 16) ? 16 : 0;
+            u.f = (float)(((diff >> 16) | diff) & 65535);
+            idx += (u.i >> 23) - 127;
+            subtract = graycode < prevcode;
+            prevcode = graycode;
+        }
+
+        crow = cjk + idx*m;
+        weight = c_weights[idx];
+        if( weight < FLT_EPSILON )
+            continue;
+
+        if( !subtract )
+        {
+            for( k = 0; k < m; k++ )
+            {
+                int t = crow[k];
+                int lval = lc[k] + t;
+                int rval = rc[k] - t;
+                double p = priors[k], p2 = p*p;
+                lsum2 += p2*lval*lval;
+                rsum2 += p2*rval*rval;
+                lc[k] = lval; rc[k] = rval;
+            }
+            L += weight;
+            R -= weight;
+        }
+        else
+        {
+            for( k = 0; k < m; k++ )
+            {
+                int t = crow[k];
+                int lval = lc[k] - t;
+                int rval = rc[k] + t;
+                double p = priors[k], p2 = p*p;
+                lsum2 += p2*lval*lval;
+                rsum2 += p2*rval*rval;
+                lc[k] = lval; rc[k] = rval;
+            }
+            L -= weight;
+            R += weight;
+        }
+
+        if( L > FLT_EPSILON && R > FLT_EPSILON )
+        {
+            double val = (lsum2*R + rsum2*L)/((double)L*R);
+            if( best_val < val )
+            {
+                best_val = val;
+                best_subset = subset_i;
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_subset >= 0 )
+    {
+        split = _split ? _split : data->new_split_cat( 0, -1.0f );
+        split->var_idx = vi;
+        split->quality = (float)best_val;
+        memset( split->subset, 0, (data->max_c_count + 31)/32 * sizeof(int));
+        if( m == 2 )
+        {
+            for( i = 0; i <= best_subset; i++ )
+            {
+                idx = (int)(int_ptr[i] - cjk) >> 1;
+                split->subset[idx >> 5] |= 1 << (idx & 31);
+            }
+        }
+        else
+        {
+            for( i = 0; i < _mi; i++ )
+            {
+                idx = cluster_labels ? cluster_labels[i] : i;
+                if( best_subset & (1 << idx) )
+                    split->subset[i >> 5] |= 1 << (i & 31);
+            }
+        }
+    }
+    return split;
+}
+
+
+CvDTreeSplit* CvDTree::find_split_ord_reg( CvDTreeNode* node, int vi, float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+    int n = node->sample_count;
+    int n1 = node->get_num_valid(vi);
+
+    cv::AutoBuffer<uchar> inn_buf;
+    if( !_ext_buf )
+        inn_buf.allocate(2*n*(sizeof(int) + sizeof(float)));
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+    float* values_buf = (float*)ext_buf;
+    int* sorted_indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = sorted_indices_buf + n;
+    const float* values = 0;
+    const int* sorted_indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values, &sorted_indices, sample_indices_buf );
+    float* responses_buf =  (float*)(sample_indices_buf + n);
+    const float* responses = data->get_ord_responses( node, responses_buf, sample_indices_buf );
+
+    int i, best_i = -1;
+    double best_val = init_quality, lsum = 0, rsum = node->value*n;
+    int L = 0, R = n1;
+
+    // compensate for missing values
+    for( i = n1; i < n; i++ )
+        rsum -= responses[sorted_indices[i]];
+
+    // find the optimal split
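+    // the score (lsum^2*R + rsum^2*L)/(L*R) = lsum^2/L + rsum^2/R; maximizing it is
+    // equivalent to minimizing the total squared error of the two children, since
+    // each child's squared error is sum(y^2) - (sum(y))^2/N and sum(y^2) is fixed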
+    for( i = 0; i < n1 - 1; i++ )
+    {
+        float t = responses[sorted_indices[i]];
+        L++; R--;
+        lsum += t;
+        rsum -= t;
+
+        if( values[i] + epsilon < values[i+1] )
+        {
+            double val = (lsum*lsum*R + rsum*rsum*L)/((double)L*R);
+            if( best_val < val )
+            {
+                best_val = val;
+                best_i = i;
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_i >= 0 )
+    {
+        split = _split ? _split : data->new_split_ord( 0, 0.0f, 0, 0, 0.0f );
+        split->var_idx = vi;
+        split->ord.c = (values[best_i] + values[best_i+1])*0.5f;
+        split->ord.split_point = best_i;
+        split->inversed = 0;
+        split->quality = (float)best_val;
+    }
+    return split;
+}
+
+CvDTreeSplit* CvDTree::find_split_cat_reg( CvDTreeNode* node, int vi, float init_quality, CvDTreeSplit* _split, uchar* _ext_buf )
+{
+    int ci = data->get_var_type(vi);
+    int n = node->sample_count;
+    int mi = data->cat_count->data.i[ci];
+
+    int base_size = (mi+2)*sizeof(double) + (mi+1)*(sizeof(int) + sizeof(double*));
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+        inn_buf.allocate(base_size + n*(2*sizeof(int) + sizeof(float)));
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+    int* labels_buf = (int*)ext_buf;
+    const int* labels = data->get_cat_var_data(node, vi, labels_buf);
+    float* responses_buf = (float*)(labels_buf + n);
+    int* sample_indices_buf = (int*)(responses_buf + n);
+    const float* responses = data->get_ord_responses(node, responses_buf, sample_indices_buf);
+
+    double* sum = (double*)cv::alignPtr(base_buf,sizeof(double)) + 1;
+    int* counts = (int*)(sum + mi) + 1;
+    double** sum_ptr = (double**)(counts + mi);
+    int i, L = 0, R = 0;
+    double best_val = init_quality, lsum = 0, rsum = 0;
+    int best_subset = -1, subset_i;
+
+    for( i = -1; i < mi; i++ )
+        sum[i] = counts[i] = 0;
+
+    // calculate sum response and weight of each category of the input var
+    for( i = 0; i < n; i++ )
+    {
+        int idx = ( (labels[i] == 65535) && data->is_buf_16u ) ? -1 : labels[i];
+        double s = sum[idx] + responses[i];
+        int nc = counts[idx] + 1;
+        sum[idx] = s;
+        counts[idx] = nc;
+    }
+
+    // calculate average response in each category
+    for( i = 0; i < mi; i++ )
+    {
+        R += counts[i];
+        rsum += sum[i];
+        sum[i] /= MAX(counts[i],1);
+        sum_ptr[i] = sum + i;
+    }
+
+    std::sort(sum_ptr, sum_ptr + mi, LessThanPtr<double>());
+
+    // revert to the unnormalized sums
+    // (there should be very little loss of accuracy)
+    for( i = 0; i < mi; i++ )
+        sum[i] *= counts[i];
+
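+    // with categories ordered by their mean response, the optimal binary partition
+    // for the squared-error criterion is always a prefix of this ordering, so only
+    // mi-1 candidate subsets (prefixes) have to be evaluated instead of 2^(mi-1)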
+    for( subset_i = 0; subset_i < mi-1; subset_i++ )
+    {
+        int idx = (int)(sum_ptr[subset_i] - sum);
+        int ni = counts[idx];
+
+        if( ni )
+        {
+            double s = sum[idx];
+            lsum += s; L += ni;
+            rsum -= s; R -= ni;
+
+            if( L && R )
+            {
+                double val = (lsum*lsum*R + rsum*rsum*L)/((double)L*R);
+                if( best_val < val )
+                {
+                    best_val = val;
+                    best_subset = subset_i;
+                }
+            }
+        }
+    }
+
+    CvDTreeSplit* split = 0;
+    if( best_subset >= 0 )
+    {
+        split = _split ? _split : data->new_split_cat( 0, -1.0f);
+        split->var_idx = vi;
+        split->quality = (float)best_val;
+        memset( split->subset, 0, (data->max_c_count + 31)/32 * sizeof(int));
+        for( i = 0; i <= best_subset; i++ )
+        {
+            int idx = (int)(sum_ptr[i] - sum);
+            split->subset[idx >> 5] |= 1 << (idx & 31);
+        }
+    }
+    return split;
+}
+
+CvDTreeSplit* CvDTree::find_surrogate_split_ord( CvDTreeNode* node, int vi, uchar* _ext_buf )
+{
+    const float epsilon = FLT_EPSILON*2;
+    const char* dir = (char*)data->direction->data.ptr;
+    int n = node->sample_count, n1 = node->get_num_valid(vi);
+    cv::AutoBuffer<uchar> inn_buf;
+    if( !_ext_buf )
+        inn_buf.allocate( n*(sizeof(int)*(data->have_priors ? 3 : 2) + sizeof(float)) );
+    uchar* ext_buf = _ext_buf ? _ext_buf : (uchar*)inn_buf;
+    float* values_buf = (float*)ext_buf;
+    int* sorted_indices_buf = (int*)(values_buf + n);
+    int* sample_indices_buf = sorted_indices_buf + n;
+    const float* values = 0;
+    const int* sorted_indices = 0;
+    data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values, &sorted_indices, sample_indices_buf );
+    // LL - number of samples that both the primary and the surrogate splits send to the left
+    // LR - ... primary split sends to the left and the surrogate split sends to the right
+    // RL - ... primary split sends to the right and the surrogate split sends to the left
+    // RR - ... both send to the right
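+    // a surrogate's quality is the (weighted) number of samples on which it agrees with
+    // the primary split: LL + RR for the direct orientation, RL + LR for the inversed one;
+    // it is kept only if it beats node->maxlr, i.e. trivially sending every sample to the
+    // larger side of the primary split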
+    int i, best_i = -1, best_inversed = 0;
+    double best_val;
+
+    if( !data->have_priors )
+    {
+        int LL = 0, RL = 0, LR, RR;
+        int worst_val = cvFloor(node->maxlr), _best_val = worst_val;
+        int sum = 0, sum_abs = 0;
+
+        for( i = 0; i < n1; i++ )
+        {
+            int d = dir[sorted_indices[i]];
+            sum += d; sum_abs += d & 1;
+        }
+
+        // sum_abs = R + L; sum = R - L
+        RR = (sum_abs + sum) >> 1;
+        LR = (sum_abs - sum) >> 1;
+
+        // initially all the samples are sent to the right by the surrogate split,
+        // LR of them are sent to the left by the primary split, and RR to the right.
+        // now iteratively compute LL, LR, RL and RR for every possible surrogate split value.
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int d = dir[sorted_indices[i]];
+
+            if( d < 0 )
+            {
+                LL++; LR--;
+                if( LL + RR > _best_val && values[i] + epsilon < values[i+1] )
+                {
+                    _best_val = LL + RR;
+                    best_i = i; best_inversed = 0;
+                }
+            }
+            else if( d > 0 )
+            {
+                RL++; RR--;
+                if( RL + LR > _best_val && values[i] + epsilon < values[i+1] )
+                {
+                    _best_val = RL + LR;
+                    best_i = i; best_inversed = 1;
+                }
+            }
+        }
+        best_val = _best_val;
+    }
+    else
+    {
+        double LL = 0, RL = 0, LR, RR;
+        double worst_val = node->maxlr;
+        double sum = 0, sum_abs = 0;
+        const double* priors = data->priors_mult->data.db;
+        int* responses_buf = sample_indices_buf + n;
+        const int* responses = data->get_class_labels(node, responses_buf);
+        best_val = worst_val;
+
+        for( i = 0; i < n1; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = priors[responses[idx]];
+            int d = dir[idx];
+            sum += d*w; sum_abs += (d & 1)*w;
+        }
+
+        // sum_abs = R + L; sum = R - L
+        RR = (sum_abs + sum)*0.5;
+        LR = (sum_abs - sum)*0.5;
+
+        // initially all the samples are sent to the right by the surrogate split,
+        // LR of them are sent to the left by the primary split, and RR to the right.
+        // now iteratively compute LL, LR, RL and RR for every possible surrogate split value.
+        for( i = 0; i < n1 - 1; i++ )
+        {
+            int idx = sorted_indices[i];
+            double w = priors[responses[idx]];
+            int d = dir[idx];
+
+            if( d < 0 )
+            {
+                LL += w; LR -= w;
+                if( LL + RR > best_val && values[i] + epsilon < values[i+1] )
+                {
+                    best_val = LL + RR;
+                    best_i = i; best_inversed = 0;
+                }
+            }
+            else if( d > 0 )
+            {
+                RL += w; RR -= w;
+                if( RL + LR > best_val && values[i] + epsilon < values[i+1] )
+                {
+                    best_val = RL + LR;
+                    best_i = i; best_inversed = 1;
+                }
+            }
+        }
+    }
+    return best_i >= 0 && best_val > node->maxlr ? data->new_split_ord( vi,
+        (values[best_i] + values[best_i+1])*0.5f, best_i, best_inversed, (float)best_val ) : 0;
+}
+
+
+CvDTreeSplit* CvDTree::find_surrogate_split_cat( CvDTreeNode* node, int vi, uchar* _ext_buf )
+{
+    const char* dir = (char*)data->direction->data.ptr;
+    int n = node->sample_count;
+    int i, mi = data->cat_count->data.i[data->get_var_type(vi)], l_win = 0;
+
+    int base_size = (2*(mi+1)+1)*sizeof(double) + (!data->have_priors ? 2*(mi+1)*sizeof(int) : 0);
+    cv::AutoBuffer<uchar> inn_buf(base_size);
+    if( !_ext_buf )
+        inn_buf.allocate(base_size + n*(sizeof(int) + (data->have_priors ? sizeof(int) : 0)));
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = _ext_buf ? _ext_buf : base_buf + base_size;
+
+    int* labels_buf = (int*)ext_buf;
+    const int* labels = data->get_cat_var_data(node, vi, labels_buf);
+    // LL - number of samples that both the primary and the surrogate splits send to the left
+    // LR - ... primary split sends to the left and the surrogate split sends to the right
+    // RL - ... primary split sends to the right and the surrogate split sends to the left
+    // RR - ... both send to the right
+    CvDTreeSplit* split = data->new_split_cat( vi, 0 );
+    double best_val = 0;
+    double* lc = (double*)cv::alignPtr(base_buf,sizeof(double)) + 1;
+    double* rc = lc + mi + 1;
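+    // lc/rc have an extra slot at index -1 that absorbs samples with a missing category value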
+
+    for( i = -1; i < mi; i++ )
+        lc[i] = rc[i] = 0;
+
+    // 1. for each category calculate the weight of samples
+    // sent to the left (lc) and to the right (rc) by the primary split
+    if( !data->have_priors )
+    {
+        int* _lc = (int*)rc + 1;
+        int* _rc = _lc + mi + 1;
+
+        for( i = -1; i < mi; i++ )
+            _lc[i] = _rc[i] = 0;
+
+        for( i = 0; i < n; i++ )
+        {
+            int idx = ( (labels[i] == 65535) && (data->is_buf_16u) ) ? -1 : labels[i];
+            int d = dir[i];
+            int sum = _lc[idx] + d;
+            int sum_abs = _rc[idx] + (d & 1);
+            _lc[idx] = sum; _rc[idx] = sum_abs;
+        }
+
+        for( i = 0; i < mi; i++ )
+        {
+            int sum = _lc[i];
+            int sum_abs = _rc[i];
+            lc[i] = (sum_abs - sum) >> 1;
+            rc[i] = (sum_abs + sum) >> 1;
+        }
+    }
+    else
+    {
+        const double* priors = data->priors_mult->data.db;
+        int* responses_buf = labels_buf + n;
+        const int* responses = data->get_class_labels(node, responses_buf);
+
+        for( i = 0; i < n; i++ )
+        {
+            int idx = ( (labels[i] == 65535) && (data->is_buf_16u) ) ? -1 : labels[i];
+            double w = priors[responses[i]];
+            int d = dir[i];
+            double sum = lc[idx] + d*w;
+            double sum_abs = rc[idx] + (d & 1)*w;
+            lc[idx] = sum; rc[idx] = sum_abs;
+        }
+
+        for( i = 0; i < mi; i++ )
+        {
+            double sum = lc[i];
+            double sum_abs = rc[i];
+            lc[i] = (sum_abs - sum) * 0.5;
+            rc[i] = (sum_abs + sum) * 0.5;
+        }
+    }
+
+    // 2. now form the split:
+    // in each category, send all the samples in the same direction as the majority
+    for( i = 0; i < mi; i++ )
+    {
+        double lval = lc[i], rval = rc[i];
+        if( lval > rval )
+        {
+            split->subset[i >> 5] |= 1 << (i & 31);
+            best_val += lval;
+            l_win++;
+        }
+        else
+            best_val += rval;
+    }
+
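+    // best_val accumulates the weight of samples on which the surrogate agrees with the
+    // primary split; it has to beat node->maxlr, the weight of the best single-direction rule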
+    split->quality = (float)best_val;
+    if( split->quality <= node->maxlr || l_win == 0 || l_win == mi )
+        cvSetRemoveByPtr( data->split_heap, split ), split = 0;
+
+    return split;
+}
+
+
+void CvDTree::calc_node_value( CvDTreeNode* node )
+{
+    int i, j, k, n = node->sample_count, cv_n = data->params.cv_folds;
+    int m = data->get_num_classes();
+
+    int base_size = data->is_classifier ? m*cv_n*sizeof(int) : 2*cv_n*sizeof(double)+cv_n*sizeof(int);
+    int ext_size = n*(sizeof(int) + (data->is_classifier ? sizeof(int) : sizeof(int)+sizeof(float)));
+    cv::AutoBuffer<uchar> inn_buf(base_size + ext_size);
+    uchar* base_buf = (uchar*)inn_buf;
+    uchar* ext_buf = base_buf + base_size;
+
+    int* cv_labels_buf = (int*)ext_buf;
+    const int* cv_labels = data->get_cv_labels(node, cv_labels_buf);
+
+    if( data->is_classifier )
+    {
+        // in case of classification tree:
+        //  * node value is the label of the class that has the largest weight in the node.
+        //  * node risk is the weighted number of misclassified samples,
+        //  * j-th cross-validation fold value and risk are calculated as above,
+        //    but using the samples with cv_labels(*)!=j.
+        //  * j-th cross-validation fold error is calculated as the weighted number of
+        //    misclassified samples with cv_labels(*)==j.
+
+        // compute the number of instances of each class
+        int* cls_count = data->counts->data.i;
+        int* responses_buf = cv_labels_buf + n;
+        const int* responses = data->get_class_labels(node, responses_buf);
+        int* cv_cls_count = (int*)base_buf;
+        double max_val = -1, total_weight = 0;
+        int max_k = -1;
+        double* priors = data->priors_mult->data.db;
+
+        for( k = 0; k < m; k++ )
+            cls_count[k] = 0;
+
+        if( cv_n == 0 )
+        {
+            for( i = 0; i < n; i++ )
+                cls_count[responses[i]]++;
+        }
+        else
+        {
+            for( j = 0; j < cv_n; j++ )
+                for( k = 0; k < m; k++ )
+                    cv_cls_count[j*m + k] = 0;
+
+            for( i = 0; i < n; i++ )
+            {
+                j = cv_labels[i]; k = responses[i];
+                cv_cls_count[j*m + k]++;
+            }
+
+            for( j = 0; j < cv_n; j++ )
+                for( k = 0; k < m; k++ )
+                    cls_count[k] += cv_cls_count[j*m + k];
+        }
+
+        if( data->have_priors && node->parent == 0 )
+        {
+            // compute priors_mult from priors, taking the per-class sample counts into account.
+            double sum = 0;
+            for( k = 0; k < m; k++ )
+            {
+                int n_k = cls_count[k];
+                priors[k] = data->priors->data.db[k]*(n_k ? 1./n_k : 0.);
+                sum += priors[k];
+            }
+            sum = 1./sum;
+            for( k = 0; k < m; k++ )
+                priors[k] *= sum;
+        }
+
+        for( k = 0; k < m; k++ )
+        {
+            double val = cls_count[k]*priors[k];
+            total_weight += val;
+            if( max_val < val )
+            {
+                max_val = val;
+                max_k = k;
+            }
+        }
+
+        node->class_idx = max_k;
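+        // map the normalized class index back to the original class label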
+        node->value = data->cat_map->data.i[
+            data->cat_ofs->data.i[data->cat_var_count] + max_k];
+        node->node_risk = total_weight - max_val;
+
+        for( j = 0; j < cv_n; j++ )
+        {
+            double sum_k = 0, sum = 0, max_val_k = 0;
+            max_val = -1; max_k = -1;
+
+            for( k = 0; k < m; k++ )
+            {
+                double w = priors[k];
+                double val_k = cv_cls_count[j*m + k]*w;
+                double val = cls_count[k]*w - val_k;
+                sum_k += val_k;
+                sum += val;
+                if( max_val < val )
+                {
+                    max_val = val;
+                    max_val_k = val_k;
+                    max_k = k;
+                }
+            }
+
+            node->cv_Tn[j] = INT_MAX;
+            node->cv_node_risk[j] = sum - max_val;
+            node->cv_node_error[j] = sum_k - max_val_k;
+        }
+    }
+    else
+    {
+        // in case of regression tree:
+        //  * node value is 1/n*sum_i(Y_i), where Y_i is i-th response,
+        //    n is the number of samples in the node.
+        //  * node risk is the sum of squared errors: sum_i((Y_i - <node_value>)^2)
+        //  * j-th cross-validation fold value and risk are calculated as above,
+        //    but using the samples with cv_labels(*)!=j.
+        //  * j-th cross-validation fold error is calculated
+        //    using samples with cv_labels(*)==j as the test subset:
+        //    error_j = sum_(i,cv_labels(i)==j)((Y_i - <node_value_j>)^2),
+        //    where node_value_j is the node value calculated
+        //    as described in the previous bullet, and summation is done
+        //    over the samples with cv_labels(*)==j.
+
+        double sum = 0, sum2 = 0;
+        float* values_buf = (float*)(cv_labels_buf + n);
+        int* sample_indices_buf = (int*)(values_buf + n);
+        const float* values = data->get_ord_responses(node, values_buf, sample_indices_buf);
+        double *cv_sum = 0, *cv_sum2 = 0;
+        int* cv_count = 0;
+
+        if( cv_n == 0 )
+        {
+            for( i = 0; i < n; i++ )
+            {
+                double t = values[i];
+                sum += t;
+                sum2 += t*t;
+            }
+        }
+        else
+        {
+            cv_sum = (double*)base_buf;
+            cv_sum2 = cv_sum + cv_n;
+            cv_count = (int*)(cv_sum2 + cv_n);
+
+            for( j = 0; j < cv_n; j++ )
+            {
+                cv_sum[j] = cv_sum2[j] = 0.;
+                cv_count[j] = 0;
+            }
+
+            for( i = 0; i < n; i++ )
+            {
+                j = cv_labels[i];
+                double t = values[i];
+                double s = cv_sum[j] + t;
+                double s2 = cv_sum2[j] + t*t;
+                int nc = cv_count[j] + 1;
+                cv_sum[j] = s;
+                cv_sum2[j] = s2;
+                cv_count[j] = nc;
+            }
+
+            for( j = 0; j < cv_n; j++ )
+            {
+                sum += cv_sum[j];
+                sum2 += cv_sum2[j];
+            }
+        }
+
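+        // sum2 - sum^2/n equals sum_i (Y_i - mean)^2, the sum of squared deviations in the node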
+        node->node_risk = sum2 - (sum/n)*sum;
+        node->value = sum/n;
+
+        for( j = 0; j < cv_n; j++ )
+        {
+            double s = cv_sum[j], si = sum - s;
+            double s2 = cv_sum2[j], s2i = sum2 - s2;
+            int c = cv_count[j], ci = n - c;
+            double r = si/MAX(ci,1);
+            node->cv_node_risk[j] = s2i - r*r*ci;
+            node->cv_node_error[j] = s2 - 2*r*s + c*r*r;
+            node->cv_Tn[j] = INT_MAX;
+        }
+    }
+}
+
+
+void CvDTree::complete_node_dir( CvDTreeNode* node )
+{
+    int vi, i, n = node->sample_count, nl, nr, d0 = 0, d1 = -1;
+    int nz = n - node->get_num_valid(node->split->var_idx);
+    char* dir = (char*)data->direction->data.ptr;
+
+    // try to complete the directions using surrogate splits
+    if( nz && data->params.use_surrogates )
+    {
+        cv::AutoBuffer<uchar> inn_buf(n*(2*sizeof(int)+sizeof(float)));
+        CvDTreeSplit* split = node->split->next;
+        for( ; split != 0 && nz; split = split->next )
+        {
+            int inversed_mask = split->inversed ? -1 : 0;
+            vi = split->var_idx;
+
+            if( data->get_var_type(vi) >= 0 ) // split on categorical var
+            {
+                int* labels_buf = (int*)(uchar*)inn_buf;
+                const int* labels = data->get_cat_var_data(node, vi, labels_buf);
+                const int* subset = split->subset;
+
+                for( i = 0; i < n; i++ )
+                {
+                    int idx = labels[i];
+                    if( !dir[i] && ( ((idx >= 0)&&(!data->is_buf_16u)) || ((idx != 65535)&&(data->is_buf_16u)) ))
+                    {
+                        int d = CV_DTREE_CAT_DIR(idx,subset);
+                        dir[i] = (char)((d ^ inversed_mask) - inversed_mask);
+                        if( !--nz )
+                            break;
+                    }
+                }
+            }
+            else // split on ordered var
+            {
+                float* values_buf = (float*)(uchar*)inn_buf;
+                int* sorted_indices_buf = (int*)(values_buf + n);
+                int* sample_indices_buf = sorted_indices_buf + n;
+                const float* values = 0;
+                const int* sorted_indices = 0;
+                data->get_ord_var_data( node, vi, values_buf, sorted_indices_buf, &values, &sorted_indices, sample_indices_buf );
+                int split_point = split->ord.split_point;
+                int n1 = node->get_num_valid(vi);
+
+                assert( 0 <= split_point && split_point < n-1 );
+
+                for( i = 0; i < n1; i++ )
+                {
+                    int idx = sorted_indices[i];
+                    if( !dir[idx] )
+                    {
+                        int d = i <= split_point ? -1 : 1;
+                        dir[idx] = (char)((d ^ inversed_mask) - inversed_mask);
+                        if( !--nz )
+                            break;
+                    }
+                }
+            }
+        }
+    }
+
+    // find the default direction for the rest
+    if( nz )
+    {
+        for( i = nr = 0; i < n; i++ )
+            nr += dir[i] > 0;
+        nl = n - nr - nz;
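+        // d0: -1 if the left branch got more samples, +1 if the right one did, 0 on a tie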
+        d0 = nl > nr ? -1 : nr > nl;
+    }
+
+    // make sure that every sample is directed either to the left or to the right
+    for( i = 0; i < n; i++ )
+    {
+        int d = dir[i];
+        if( !d )
+        {
+            d = d0;
+            if( !d )
+                d = d1, d1 = -d1;
+        }
+        d = d > 0;
+        dir[i] = (char)d; // remap (-1,1) to (0,1)
+    }
+}
+
+
+void CvDTree::split_node_data( CvDTreeNode* node )
+{
+    int vi, i, n = node->sample_count, nl, nr, scount = data->sample_count;
+    char* dir = (char*)data->direction->data.ptr;
+    CvDTreeNode *left = 0, *right = 0;
+    int* new_idx = data->split_buf->data.i;
+    int new_buf_idx = data->get_child_buf_idx( node );
+    int work_var_count = data->get_work_var_count();
+    CvMat* buf = data->buf;
+    size_t length_buf_row = data->get_length_subbuf();
+    cv::AutoBuffer<uchar> inn_buf(n*(3*sizeof(int) + sizeof(float)));
+    int* temp_buf = (int*)(uchar*)inn_buf;
+
+    complete_node_dir(node);
+
+    for( i = nl = nr = 0; i < n; i++ )
+    {
+        int d = dir[i];
+        // initialize new indices for splitting ordered variables
+        new_idx[i] = (nl & (d-1)) | (nr & -d); // d ? ri : li
+        nr += d;
+        nl += d^1;
+    }
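+    // new_idx[i] is the position of sample i inside its destination child node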
+
+    bool split_input_data;
+    node->left = left = data->new_node( node, nl, new_buf_idx, node->offset );
+    node->right = right = data->new_node( node, nr, new_buf_idx, node->offset + nl );
+
+    split_input_data = node->depth + 1 < data->params.max_depth &&
+        (node->left->sample_count > data->params.min_sample_count ||
+        node->right->sample_count > data->params.min_sample_count);
+
+    // split ordered variables, keep both halves sorted.
+    for( vi = 0; vi < data->var_count; vi++ )
+    {
+        int ci = data->get_var_type(vi);
+
+        if( ci >= 0 || !split_input_data )
+            continue;
+
+        int n1 = node->get_num_valid(vi);
+        float* src_val_buf = (float*)(uchar*)(temp_buf + n);
+        int* src_sorted_idx_buf = (int*)(src_val_buf + n);
+        int* src_sample_idx_buf = src_sorted_idx_buf + n;
+        const float* src_val = 0;
+        const int* src_sorted_idx = 0;
+        data->get_ord_var_data(node, vi, src_val_buf, src_sorted_idx_buf, &src_val, &src_sorted_idx, src_sample_idx_buf);
+
+        for(i = 0; i < n; i++)
+            temp_buf[i] = src_sorted_idx[i];
+
+        if (data->is_buf_16u)
+        {
+            unsigned short *ldst, *rdst, *ldst0, *rdst0;
+            //unsigned short tl, tr;
+            ldst0 = ldst = (unsigned short*)(buf->data.s + left->buf_idx*length_buf_row +
+                vi*scount + left->offset);
+            rdst0 = rdst = (unsigned short*)(ldst + nl);
+
+            // split sorted
+            for( i = 0; i < n1; i++ )
+            {
+                int idx = temp_buf[i];
+                int d = dir[idx];
+                idx = new_idx[idx];
+                if (d)
+                {
+                    *rdst = (unsigned short)idx;
+                    rdst++;
+                }
+                else
+                {
+                    *ldst = (unsigned short)idx;
+                    ldst++;
+                }
+            }
+
+            left->set_num_valid(vi, (int)(ldst - ldst0));
+            right->set_num_valid(vi, (int)(rdst - rdst0));
+
+            // split missing
+            for( ; i < n; i++ )
+            {
+                int idx = temp_buf[i];
+                int d = dir[idx];
+                idx = new_idx[idx];
+                if (d)
+                {
+                    *rdst = (unsigned short)idx;
+                    rdst++;
+                }
+                else
+                {
+                    *ldst = (unsigned short)idx;
+                    ldst++;
+                }
+            }
+        }
+        else
+        {
+            int *ldst0, *ldst, *rdst0, *rdst;
+            ldst0 = ldst = buf->data.i + left->buf_idx*length_buf_row +
+                vi*scount + left->offset;
+            rdst0 = rdst = buf->data.i + right->buf_idx*length_buf_row +
+                vi*scount + right->offset;
+
+            // split sorted
+            for( i = 0; i < n1; i++ )
+            {
+                int idx = temp_buf[i];
+                int d = dir[idx];
+                idx = new_idx[idx];
+                if (d)
+                {
+                    *rdst = idx;
+                    rdst++;
+                }
+                else
+                {
+                    *ldst = idx;
+                    ldst++;
+                }
+            }
+
+            left->set_num_valid(vi, (int)(ldst - ldst0));
+            right->set_num_valid(vi, (int)(rdst - rdst0));
+
+            // split missing
+            for( ; i < n; i++ )
+            {
+                int idx = temp_buf[i];
+                int d = dir[idx];
+                idx = new_idx[idx];
+                if (d)
+                {
+                    *rdst = idx;
+                    rdst++;
+                }
+                else
+                {
+                    *ldst = idx;
+                    ldst++;
+                }
+            }
+        }
+    }
+
+    // split categorical vars, responses and cv_labels using new_idx relocation table
+    for( vi = 0; vi < work_var_count; vi++ )
+    {
+        int ci = data->get_var_type(vi);
+        int n1 = node->get_num_valid(vi), nr1 = 0;
+
+        if( ci < 0 || (vi < data->var_count && !split_input_data) )
+            continue;
+
+        int *src_lbls_buf = temp_buf + n;
+        const int* src_lbls = data->get_cat_var_data(node, vi, src_lbls_buf);
+
+        for(i = 0; i < n; i++)
+            temp_buf[i] = src_lbls[i];
+
+        if (data->is_buf_16u)
+        {
+            unsigned short *ldst = (unsigned short *)(buf->data.s + left->buf_idx*length_buf_row +
+                vi*scount + left->offset);
+            unsigned short *rdst = (unsigned short *)(buf->data.s + right->buf_idx*length_buf_row +
+                vi*scount + right->offset);
+
+            for( i = 0; i < n; i++ )
+            {
+                int d = dir[i];
+                int idx = temp_buf[i];
+                if (d)
+                {
+                    *rdst = (unsigned short)idx;
+                    rdst++;
+                    nr1 += (idx != 65535 )&d;
+                }
+                else
+                {
+                    *ldst = (unsigned short)idx;
+                    ldst++;
+                }
+            }
+
+            if( vi < data->var_count )
+            {
+                left->set_num_valid(vi, n1 - nr1);
+                right->set_num_valid(vi, nr1);
+            }
+        }
+        else
+        {
+            int *ldst = buf->data.i + left->buf_idx*length_buf_row +
+                vi*scount + left->offset;
+            int *rdst = buf->data.i + right->buf_idx*length_buf_row +
+                vi*scount + right->offset;
+
+            for( i = 0; i < n; i++ )
+            {
+                int d = dir[i];
+                int idx = temp_buf[i];
+                if (d)
+                {
+                    *rdst = idx;
+                    rdst++;
+                    nr1 += (idx >= 0)&d;
+                }
+                else
+                {
+                    *ldst = idx;
+                    ldst++;
+                }
+
+            }
+
+            if( vi < data->var_count )
+            {
+                left->set_num_valid(vi, n1 - nr1);
+                right->set_num_valid(vi, nr1);
+            }
+        }
+    }
+
+
+    // split sample indices
+    int *sample_idx_src_buf = temp_buf + n;
+    const int* sample_idx_src = data->get_sample_indices(node, sample_idx_src_buf);
+
+    for(i = 0; i < n; i++)
+        temp_buf[i] = sample_idx_src[i];
+
+    int pos = data->get_work_var_count();
+    if (data->is_buf_16u)
+    {
+        unsigned short* ldst = (unsigned short*)(buf->data.s + left->buf_idx*length_buf_row +
+            pos*scount + left->offset);
+        unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*length_buf_row +
+            pos*scount + right->offset);
+        for (i = 0; i < n; i++)
+        {
+            int d = dir[i];
+            unsigned short idx = (unsigned short)temp_buf[i];
+            if (d)
+            {
+                *rdst = idx;
+                rdst++;
+            }
+            else
+            {
+                *ldst = idx;
+                ldst++;
+            }
+        }
+    }
+    else
+    {
+        int* ldst = buf->data.i + left->buf_idx*length_buf_row +
+            pos*scount + left->offset;
+        int* rdst = buf->data.i + right->buf_idx*length_buf_row +
+            pos*scount + right->offset;
+        for (i = 0; i < n; i++)
+        {
+            int d = dir[i];
+            int idx = temp_buf[i];
+            if (d)
+            {
+                *rdst = idx;
+                rdst++;
+            }
+            else
+            {
+                *ldst = idx;
+                ldst++;
+            }
+        }
+    }
+
+    // deallocate the parent node data that is not needed anymore
+    data->free_node_data(node);
+}
+
+float CvDTree::calc_error( CvMLData* _data, int type, std::vector<float> *resp )
+{
+    float err = 0;
+    const CvMat* values = _data->get_values();
+    const CvMat* response = _data->get_responses();
+    const CvMat* missing = _data->get_missing();
+    const CvMat* sample_idx = (type == CV_TEST_ERROR) ? _data->get_test_sample_idx() : _data->get_train_sample_idx();
+    const CvMat* var_types = _data->get_var_types();
+    int* sidx = sample_idx ? sample_idx->data.i : 0;
+    int r_step = CV_IS_MAT_CONT(response->type) ?
+                1 : response->step / CV_ELEM_SIZE(response->type);
+    bool is_classifier = var_types->data.ptr[var_types->cols-1] == CV_VAR_CATEGORICAL;
+    int sample_count = sample_idx ? sample_idx->cols : 0;
+    sample_count = (type == CV_TRAIN_ERROR && sample_count == 0) ? values->rows : sample_count;
+    float* pred_resp = 0;
+    if( resp && (sample_count > 0) )
+    {
+        resp->resize( sample_count );
+        pred_resp = &((*resp)[0]);
+    }
+
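+    // classification error is reported as the percentage of misclassified samples;
+    // regression error is the mean squared error over the evaluated subset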
+    if ( is_classifier )
+    {
+        for( int i = 0; i < sample_count; i++ )
+        {
+            CvMat sample, miss;
+            int si = sidx ? sidx[i] : i;
+            cvGetRow( values, &sample, si );
+            if( missing )
+                cvGetRow( missing, &miss, si );
+            float r = (float)predict( &sample, missing ? &miss : 0 )->value;
+            if( pred_resp )
+                pred_resp[i] = r;
+            int d = fabs((double)r - response->data.fl[(size_t)si*r_step]) <= FLT_EPSILON ? 0 : 1;
+            err += d;
+        }
+        err = sample_count ? err / (float)sample_count * 100 : -FLT_MAX;
+    }
+    else
+    {
+        for( int i = 0; i < sample_count; i++ )
+        {
+            CvMat sample, miss;
+            int si = sidx ? sidx[i] : i;
+            cvGetRow( values, &sample, si );
+            if( missing )
+                cvGetRow( missing, &miss, si );
+            float r = (float)predict( &sample, missing ? &miss : 0 )->value;
+            if( pred_resp )
+                pred_resp[i] = r;
+            float d = r - response->data.fl[(size_t)si*r_step];
+            err += d*d;
+        }
+        err = sample_count ? err / (float)sample_count : -FLT_MAX;
+    }
+    return err;
+}
+
+void CvDTree::prune_cv()
+{
+    CvMat* ab = 0;
+    CvMat* temp = 0;
+    CvMat* err_jk = 0;
+
+    // 1. build the tree sequence for each cv fold, calculate error_{Tj,beta_k}.
+    // 2. choose the best tree index (if needed, apply the 1SE rule).
+    // 3. store the best index and cut the branches.
+
+    CV_FUNCNAME( "CvDTree::prune_cv" );
+
+    __BEGIN__;
+
+    int ti, j, tree_count = 0, cv_n = data->params.cv_folds, n = root->sample_count;
+    // currently, 1SE for regression is not implemented
+    bool use_1se = data->params.use_1se_rule != 0 && data->is_classifier;
+    double* err;
+    double min_err = 0, min_err_se = 0;
+    int min_idx = -1;
+
+    CV_CALL( ab = cvCreateMat( 1, 256, CV_64F ));
+
+    // build the main tree sequence, calculate the alphas
+    for(;;tree_count++)
+    {
+        double min_alpha = update_tree_rnc(tree_count, -1);
+        if( cut_tree(tree_count, -1, min_alpha) )
+            break;
+
+        if( ab->cols <= tree_count )
+        {
+            CV_CALL( temp = cvCreateMat( 1, ab->cols*3/2, CV_64F ));
+            for( ti = 0; ti < ab->cols; ti++ )
+                temp->data.db[ti] = ab->data.db[ti];
+            cvReleaseMat( &ab );
+            ab = temp;
+            temp = 0;
+        }
+
+        ab->data.db[tree_count] = min_alpha;
+    }
+
+    ab->data.db[0] = 0.;
+
+    if( tree_count > 0 )
+    {
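+        // replace each alpha with the geometric mean of consecutive values so that the
+        // thresholds fall strictly between the alphas at which the tree sequence changes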
+        for( ti = 1; ti < tree_count-1; ti++ )
+            ab->data.db[ti] = sqrt(ab->data.db[ti]*ab->data.db[ti+1]);
+        ab->data.db[tree_count-1] = DBL_MAX*0.5;
+
+        CV_CALL( err_jk = cvCreateMat( cv_n, tree_count, CV_64F ));
+        err = err_jk->data.db;
+
+        for( j = 0; j < cv_n; j++ )
+        {
+            int tj = 0, tk = 0;
+            for( ; tk < tree_count; tj++ )
+            {
+                double min_alpha = update_tree_rnc(tj, j);
+                if( cut_tree(tj, j, min_alpha) )
+                    min_alpha = DBL_MAX;
+
+                for( ; tk < tree_count; tk++ )
+                {
+                    if( ab->data.db[tk] > min_alpha )
+                        break;
+                    err[j*tree_count + tk] = root->tree_error;
+                }
+            }
+        }
+
+        for( ti = 0; ti < tree_count; ti++ )
+        {
+            double sum_err = 0;
+            for( j = 0; j < cv_n; j++ )
+                sum_err += err[j*tree_count + ti];
+            if( ti == 0 || sum_err < min_err )
+            {
+                min_err = sum_err;
+                min_idx = ti;
+                if( use_1se )
+                    min_err_se = sqrt( sum_err*(n - sum_err) );
+            }
+            else if( sum_err < min_err + min_err_se )
+                min_idx = ti;
+        }
+    }
+
+    pruned_tree_idx = min_idx;
+    free_prune_data(data->params.truncate_pruned_tree != 0);
+
+    __END__;
+
+    cvReleaseMat( &err_jk );
+    cvReleaseMat( &ab );
+    cvReleaseMat( &temp );
+}
+
+
+double CvDTree::update_tree_rnc( int T, int fold )
+{
+    CvDTreeNode* node = root;
+    double min_alpha = DBL_MAX;
+
+    for(;;)
+    {
+        CvDTreeNode* parent;
+        for(;;)
+        {
+            int t = fold >= 0 ? node->cv_Tn[fold] : node->Tn;
+            if( t <= T || !node->left )
+            {
+                node->complexity = 1;
+                node->tree_risk = node->node_risk;
+                node->tree_error = 0.;
+                if( fold >= 0 )
+                {
+                    node->tree_risk = node->cv_node_risk[fold];
+                    node->tree_error = node->cv_node_error[fold];
+                }
+                break;
+            }
+            node = node->left;
+        }
+
+        for( parent = node->parent; parent && parent->right == node;
+            node = parent, parent = parent->parent )
+        {
+            parent->complexity += node->complexity;
+            parent->tree_risk += node->tree_risk;
+            parent->tree_error += node->tree_error;
+
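+            // cost-complexity measure: the increase in (cross-validated) risk per leaf
+            // that would be removed if this subtree were pruned to its root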
+            parent->alpha = ((fold >= 0 ? parent->cv_node_risk[fold] : parent->node_risk)
+                - parent->tree_risk)/(parent->complexity - 1);
+            min_alpha = MIN( min_alpha, parent->alpha );
+        }
+
+        if( !parent )
+            break;
+
+        parent->complexity = node->complexity;
+        parent->tree_risk = node->tree_risk;
+        parent->tree_error = node->tree_error;
+        node = parent->right;
+    }
+
+    return min_alpha;
+}
+
+
+int CvDTree::cut_tree( int T, int fold, double min_alpha )
+{
+    CvDTreeNode* node = root;
+    if( !node->left )
+        return 1;
+
+    for(;;)
+    {
+        CvDTreeNode* parent;
+        for(;;)
+        {
+            int t = fold >= 0 ? node->cv_Tn[fold] : node->Tn;
+            if( t <= T || !node->left )
+                break;
+            if( node->alpha <= min_alpha + FLT_EPSILON )
+            {
+                if( fold >= 0 )
+                    node->cv_Tn[fold] = T;
+                else
+                    node->Tn = T;
+                if( node == root )
+                    return 1;
+                break;
+            }
+            node = node->left;
+        }
+
+        for( parent = node->parent; parent && parent->right == node;
+            node = parent, parent = parent->parent )
+            ;
+
+        if( !parent )
+            break;
+
+        node = parent->right;
+    }
+
+    return 0;
+}
+
+
+void CvDTree::free_prune_data(bool _cut_tree)
+{
+    CvDTreeNode* node = root;
+
+    for(;;)
+    {
+        CvDTreeNode* parent;
+        for(;;)
+        {
+            // do not call cvSetRemoveByPtr( cv_heap, node->cv_Tn )
+            // as we will clear the whole cross-validation heap at the end
+            node->cv_Tn = 0;
+            node->cv_node_error = node->cv_node_risk = 0;
+            if( !node->left )
+                break;
+            node = node->left;
+        }
+
+        for( parent = node->parent; parent && parent->right == node;
+            node = parent, parent = parent->parent )
+        {
+            if( _cut_tree && parent->Tn <= pruned_tree_idx )
+            {
+                data->free_node( parent->left );
+                data->free_node( parent->right );
+                parent->left = parent->right = 0;
+            }
+        }
+
+        if( !parent )
+            break;
+
+        node = parent->right;
+    }
+
+    if( data->cv_heap )
+        cvClearSet( data->cv_heap );
+}
+
+
+void CvDTree::free_tree()
+{
+    if( root && data && data->shared )
+    {
+        pruned_tree_idx = INT_MIN;
+        free_prune_data(true);
+        data->free_node(root);
+        root = 0;
+    }
+}
+
+CvDTreeNode* CvDTree::predict( const CvMat* _sample,
+    const CvMat* _missing, bool preprocessed_input ) const
+{
+    cv::AutoBuffer<int> catbuf;
+
+    int i, mstep = 0;
+    const uchar* m = 0;
+    CvDTreeNode* node = root;
+
+    if( !node )
+        CV_Error( CV_StsError, "The tree has not been trained yet" );
+
+    if( !CV_IS_MAT(_sample) || CV_MAT_TYPE(_sample->type) != CV_32FC1 ||
+        (_sample->cols != 1 && _sample->rows != 1) ||
+        (_sample->cols + _sample->rows - 1 != data->var_all && !preprocessed_input) ||
+        (_sample->cols + _sample->rows - 1 != data->var_count && preprocessed_input) )
+            CV_Error( CV_StsBadArg,
+        "the input sample must be 1d floating-point vector with the same "
+        "number of elements as the total number of variables used for training" );
+
+    const float* sample = _sample->data.fl;
+    int step = CV_IS_MAT_CONT(_sample->type) ? 1 : _sample->step/sizeof(sample[0]);
+
+    if( data->cat_count && !preprocessed_input ) // cache for categorical variables
+    {
+        int n = data->cat_count->cols;
+        catbuf.allocate(n);
+        for( i = 0; i < n; i++ )
+            catbuf[i] = -1;
+    }
+
+    if( _missing )
+    {
+        if( !CV_IS_MAT(_missing) || !CV_IS_MASK_ARR(_missing) ||
+            !CV_ARE_SIZES_EQ(_missing, _sample) )
+            CV_Error( CV_StsBadArg,
+        "the missing data mask must be 8-bit vector of the same size as input sample" );
+        m = _missing->data.ptr;
+        mstep = CV_IS_MAT_CONT(_missing->type) ? 1 : _missing->step/sizeof(m[0]);
+    }
+
+    const int* vtype = data->var_type->data.i;
+    const int* vidx = data->var_idx && !preprocessed_input ? data->var_idx->data.i : 0;
+    const int* cmap = data->cat_map ? data->cat_map->data.i : 0;
+    const int* cofs = data->cat_ofs ? data->cat_ofs->data.i : 0;
+
+    while( node->Tn > pruned_tree_idx && node->left )
+    {
+        CvDTreeSplit* split = node->split;
+        int dir = 0;
+        for( ; !dir && split != 0; split = split->next )
+        {
+            int vi = split->var_idx;
+            int ci = vtype[vi];
+            i = vidx ? vidx[vi] : vi;
+            float val = sample[(size_t)i*step];
+            if( m && m[(size_t)i*mstep] )
+                continue;
+            if( ci < 0 ) // ordered
+                dir = val <= split->ord.c ? -1 : 1;
+            else // categorical
+            {
+                int c;
+                if( preprocessed_input )
+                    c = cvRound(val);
+                else
+                {
+                    c = catbuf[ci];
+                    if( c < 0 )
+                    {
+                        int a = c = cofs[ci];
+                        int b = (ci+1 >= data->cat_ofs->cols) ? data->cat_map->cols : cofs[ci+1];
+
+                        int ival = cvRound(val);
+                        if( ival != val )
+                            CV_Error( CV_StsBadArg,
+                            "one of input categorical variable is not an integer" );
+
+                        int sh = 0;
+                        while( a < b )
+                        {
+                            sh++;
+                            c = (a + b) >> 1;
+                            if( ival < cmap[c] )
+                                b = c;
+                            else if( ival > cmap[c] )
+                                a = c+1;
+                            else
+                                break;
+                        }
+
+                        if( c < 0 || ival != cmap[c] )
+                            continue;
+
+                        catbuf[ci] = c -= cofs[ci];
+                    }
+                }
+                c = ( (c == 65535) && data->is_buf_16u ) ? -1 : c;
+                dir = CV_DTREE_CAT_DIR(c, split->subset);
+            }
+
+            if( split->inversed )
+                dir = -dir;
+        }
+
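+        // if the split variable is missing and no surrogate split applies, go to the child
+        // that received more training samples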
+        if( !dir )
+        {
+            double diff = node->right->sample_count - node->left->sample_count;
+            dir = diff < 0 ? -1 : 1;
+        }
+        node = dir < 0 ? node->left : node->right;
+    }
+
+    return node;
+}
+
+
+CvDTreeNode* CvDTree::predict( const Mat& _sample, const Mat& _missing, bool preprocessed_input ) const
+{
+    CvMat sample = _sample, mmask = _missing;
+    return predict(&sample, mmask.data.ptr ? &mmask : 0, preprocessed_input);
+}
+
+
+const CvMat* CvDTree::get_var_importance()
+{
+    if( !var_importance )
+    {
+        CvDTreeNode* node = root;
+        double* importance;
+        if( !node )
+            return 0;
+        var_importance = cvCreateMat( 1, data->var_count, CV_64F );
+        cvZero( var_importance );
+        importance = var_importance->data.db;
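+        // importance of a variable is the sum of the qualities of all splits
+        // (primary and surrogate) made on it, L1-normalized at the end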
+
+        for(;;)
+        {
+            CvDTreeNode* parent;
+            for( ;; node = node->left )
+            {
+                CvDTreeSplit* split = node->split;
+
+                if( !node->left || node->Tn <= pruned_tree_idx )
+                    break;
+
+                for( ; split != 0; split = split->next )
+                    importance[split->var_idx] += split->quality;
+            }
+
+            for( parent = node->parent; parent && parent->right == node;
+                node = parent, parent = parent->parent )
+                ;
+
+            if( !parent )
+                break;
+
+            node = parent->right;
+        }
+
+        cvNormalize( var_importance, var_importance, 1., 0, CV_L1 );
+    }
+
+    return var_importance;
+}
+
+
+void CvDTree::write_split( CvFileStorage* fs, CvDTreeSplit* split ) const
+{
+    int ci;
+
+    cvStartWriteStruct( fs, 0, CV_NODE_MAP + CV_NODE_FLOW );
+    cvWriteInt( fs, "var", split->var_idx );
+    cvWriteReal( fs, "quality", split->quality );
+
+    ci = data->get_var_type(split->var_idx);
+    if( ci >= 0 ) // split on a categorical var
+    {
+        int i, n = data->cat_count->data.i[ci], to_right = 0, default_dir;
+        for( i = 0; i < n; i++ )
+            to_right += CV_DTREE_CAT_DIR(i,split->subset) > 0;
+
+        // ad-hoc rule for when to use the inverse categorical split notation
+        // to achieve a more compact and clear representation
+        default_dir = to_right <= 1 || to_right <= MIN(3, n/2) || to_right <= n/3 ? -1 : 1;
+
+        cvStartWriteStruct( fs, default_dir*(split->inversed ? -1 : 1) > 0 ?
+                            "in" : "not_in", CV_NODE_SEQ+CV_NODE_FLOW );
+
+        for( i = 0; i < n; i++ )
+        {
+            int dir = CV_DTREE_CAT_DIR(i,split->subset);
+            if( dir*default_dir < 0 )
+                cvWriteInt( fs, 0, i );
+        }
+        cvEndWriteStruct( fs );
+    }
+    else
+        cvWriteReal( fs, !split->inversed ? "le" : "gt", split->ord.c );
+
+    cvEndWriteStruct( fs );
+}
+
+
+void CvDTree::write_node( CvFileStorage* fs, CvDTreeNode* node ) const
+{
+    CvDTreeSplit* split;
+
+    cvStartWriteStruct( fs, 0, CV_NODE_MAP );
+
+    cvWriteInt( fs, "depth", node->depth );
+    cvWriteInt( fs, "sample_count", node->sample_count );
+    cvWriteReal( fs, "value", node->value );
+
+    if( data->is_classifier )
+        cvWriteInt( fs, "norm_class_idx", node->class_idx );
+
+    cvWriteInt( fs, "Tn", node->Tn );
+    cvWriteInt( fs, "complexity", node->complexity );
+    cvWriteReal( fs, "alpha", node->alpha );
+    cvWriteReal( fs, "node_risk", node->node_risk );
+    cvWriteReal( fs, "tree_risk", node->tree_risk );
+    cvWriteReal( fs, "tree_error", node->tree_error );
+
+    if( node->left )
+    {
+        cvStartWriteStruct( fs, "splits", CV_NODE_SEQ );
+
+        for( split = node->split; split != 0; split = split->next )
+            write_split( fs, split );
+
+        cvEndWriteStruct( fs );
+    }
+
+    cvEndWriteStruct( fs );
+}
+
+
+void CvDTree::write_tree_nodes( CvFileStorage* fs ) const
+{
+    //CV_FUNCNAME( "CvDTree::write_tree_nodes" );
+
+    __BEGIN__;
+
+    CvDTreeNode* node = root;
+
+    // traverse the tree and save all the nodes in depth-first order
+    for(;;)
+    {
+        CvDTreeNode* parent;
+        for(;;)
+        {
+            write_node( fs, node );
+            if( !node->left )
+                break;
+            node = node->left;
+        }
+
+        for( parent = node->parent; parent && parent->right == node;
+            node = parent, parent = parent->parent )
+            ;
+
+        if( !parent )
+            break;
+
+        node = parent->right;
+    }
+
+    __END__;
+}
+
+
+void CvDTree::write( CvFileStorage* fs, const char* name ) const
+{
+    //CV_FUNCNAME( "CvDTree::write" );
+
+    __BEGIN__;
+
+    cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_TREE );
+
+    //get_var_importance();
+    data->write_params( fs );
+    //if( var_importance )
+    //cvWrite( fs, "var_importance", var_importance );
+    write( fs );
+
+    cvEndWriteStruct( fs );
+
+    __END__;
+}
+
+
+void CvDTree::write( CvFileStorage* fs ) const
+{
+    //CV_FUNCNAME( "CvDTree::write" );
+
+    __BEGIN__;
+
+    cvWriteInt( fs, "best_tree_idx", pruned_tree_idx );
+
+    cvStartWriteStruct( fs, "nodes", CV_NODE_SEQ );
+    write_tree_nodes( fs );
+    cvEndWriteStruct( fs );
+
+    __END__;
+}
+
+
+CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
+{
+    CvDTreeSplit* split = 0;
+
+    CV_FUNCNAME( "CvDTree::read_split" );
+
+    __BEGIN__;
+
+    int vi, ci;
+
+    if( !fnode || CV_NODE_TYPE(fnode->tag) != CV_NODE_MAP )
+        CV_ERROR( CV_StsParseError, "some of the splits are not stored properly" );
+
+    vi = cvReadIntByName( fs, fnode, "var", -1 );
+    if( (unsigned)vi >= (unsigned)data->var_count )
+        CV_ERROR( CV_StsOutOfRange, "Split variable index is out of range" );
+
+    ci = data->get_var_type(vi);
+    if( ci >= 0 ) // split on categorical var
+    {
+        int i, n = data->cat_count->data.i[ci], inversed = 0, val;
+        CvSeqReader reader;
+        CvFileNode* inseq;
+        split = data->new_split_cat( vi, 0 );
+        inseq = cvGetFileNodeByName( fs, fnode, "in" );
+        if( !inseq )
+        {
+            inseq = cvGetFileNodeByName( fs, fnode, "not_in" );
+            inversed = 1;
+        }
+        if( !inseq ||
+            (CV_NODE_TYPE(inseq->tag) != CV_NODE_SEQ && CV_NODE_TYPE(inseq->tag) != CV_NODE_INT))
+            CV_ERROR( CV_StsParseError,
+            "Either 'in' or 'not_in' tags should be inside a categorical split data" );
+
+        if( CV_NODE_TYPE(inseq->tag) == CV_NODE_INT )
+        {
+            val = inseq->data.i;
+            if( (unsigned)val >= (unsigned)n )
+                CV_ERROR( CV_StsOutOfRange, "some of in/not_in elements are out of range" );
+
+            split->subset[val >> 5] |= 1 << (val & 31);
+        }
+        else
+        {
+            cvStartReadSeq( inseq->data.seq, &reader );
+
+            for( i = 0; i < reader.seq->total; i++ )
+            {
+                CvFileNode* inode = (CvFileNode*)reader.ptr;
+                val = inode->data.i;
+                if( CV_NODE_TYPE(inode->tag) != CV_NODE_INT || (unsigned)val >= (unsigned)n )
+                    CV_ERROR( CV_StsOutOfRange, "some of in/not_in elements are out of range" );
+
+                split->subset[val >> 5] |= 1 << (val & 31);
+                CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+            }
+        }
+
+        // for categorical splits we do not use the inversed flag;
+        // instead we invert the category subset stored in the split
+        if( inversed )
+            for( i = 0; i < (n + 31) >> 5; i++ )
+                split->subset[i] ^= -1;
+    }
+    else
+    {
+        CvFileNode* cmp_node;
+        split = data->new_split_ord( vi, 0, 0, 0, 0 );
+
+        cmp_node = cvGetFileNodeByName( fs, fnode, "le" );
+        if( !cmp_node )
+        {
+            cmp_node = cvGetFileNodeByName( fs, fnode, "gt" );
+            split->inversed = 1;
+        }
+
+        split->ord.c = (float)cvReadReal( cmp_node );
+    }
+
+    split->quality = (float)cvReadRealByName( fs, fnode, "quality" );
+
+    __END__;
+
+    return split;
+}
+
+
+CvDTreeNode* CvDTree::read_node( CvFileStorage* fs, CvFileNode* fnode, CvDTreeNode* parent )
+{
+    CvDTreeNode* node = 0;
+
+    CV_FUNCNAME( "CvDTree::read_node" );
+
+    __BEGIN__;
+
+    CvFileNode* splits;
+    int i, depth;
+
+    if( !fnode || CV_NODE_TYPE(fnode->tag) != CV_NODE_MAP )
+        CV_ERROR( CV_StsParseError, "some of the tree elements are not stored properly" );
+
+    CV_CALL( node = data->new_node( parent, 0, 0, 0 ));
+    depth = cvReadIntByName( fs, fnode, "depth", -1 );
+    if( depth != node->depth )
+        CV_ERROR( CV_StsParseError, "incorrect node depth" );
+
+    node->sample_count = cvReadIntByName( fs, fnode, "sample_count" );
+    node->value = cvReadRealByName( fs, fnode, "value" );
+    if( data->is_classifier )
+        node->class_idx = cvReadIntByName( fs, fnode, "norm_class_idx" );
+
+    node->Tn = cvReadIntByName( fs, fnode, "Tn" );
+    node->complexity = cvReadIntByName( fs, fnode, "complexity" );
+    node->alpha = cvReadRealByName( fs, fnode, "alpha" );
+    node->node_risk = cvReadRealByName( fs, fnode, "node_risk" );
+    node->tree_risk = cvReadRealByName( fs, fnode, "tree_risk" );
+    node->tree_error = cvReadRealByName( fs, fnode, "tree_error" );
+
+    splits = cvGetFileNodeByName( fs, fnode, "splits" );
+    if( splits )
+    {
+        CvSeqReader reader;
+        CvDTreeSplit* last_split = 0;
+
+        if( CV_NODE_TYPE(splits->tag) != CV_NODE_SEQ )
+            CV_ERROR( CV_StsParseError, "splits tag must stored as a sequence" );
+
+        cvStartReadSeq( splits->data.seq, &reader );
+        for( i = 0; i < reader.seq->total; i++ )
+        {
+            CvDTreeSplit* split;
+            CV_CALL( split = read_split( fs, (CvFileNode*)reader.ptr ));
+            if( !last_split )
+                node->split = last_split = split;
+            else
+                last_split = last_split->next = split;
+
+            CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+        }
+    }
+
+    __END__;
+
+    return node;
+}
+
+
+void CvDTree::read_tree_nodes( CvFileStorage* fs, CvFileNode* fnode )
+{
+    CV_FUNCNAME( "CvDTree::read_tree_nodes" );
+
+    __BEGIN__;
+
+    CvSeqReader reader;
+    CvDTreeNode _root;
+    CvDTreeNode* parent = &_root;
+    int i;
+    parent->left = parent->right = parent->parent = 0;
+
+    cvStartReadSeq( fnode->data.seq, &reader );
+
+    for( i = 0; i < reader.seq->total; i++ )
+    {
+        CvDTreeNode* node;
+
+        CV_CALL( node = read_node( fs, (CvFileNode*)reader.ptr, parent != &_root ? parent : 0 ));
+        if( !parent->left )
+            parent->left = node;
+        else
+            parent->right = node;
+        if( node->split )
+            parent = node;
+        else
+        {
+            while( parent && parent->right )
+                parent = parent->parent;
+        }
+
+        CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+    }
+
+    root = _root.left;
+
+    __END__;
+}
+
+
+void CvDTree::read( CvFileStorage* fs, CvFileNode* fnode )
+{
+    CvDTreeTrainData* _data = new CvDTreeTrainData();
+    _data->read_params( fs, fnode );
+
+    read( fs, fnode, _data );
+    get_var_importance();
+}
+
+
+// a special entry point for reading weak decision trees from tree ensembles
+void CvDTree::read( CvFileStorage* fs, CvFileNode* node, CvDTreeTrainData* _data )
+{
+    CV_FUNCNAME( "CvDTree::read" );
+
+    __BEGIN__;
+
+    CvFileNode* tree_nodes;
+
+    clear();
+    data = _data;
+
+    tree_nodes = cvGetFileNodeByName( fs, node, "nodes" );
+    if( !tree_nodes || CV_NODE_TYPE(tree_nodes->tag) != CV_NODE_SEQ )
+        CV_ERROR( CV_StsParseError, "nodes tag is missing" );
+
+    pruned_tree_idx = cvReadIntByName( fs, node, "best_tree_idx", -1 );
+    read_tree_nodes( fs, tree_nodes );
+
+    __END__;
+}
+
+Mat CvDTree::getVarImportance()
+{
+    return cvarrToMat(get_var_importance());
+}
+
+/* End of file. */
index 52bacc8..d1c3e4e 100644 (file)
@@ -1,6 +1,4 @@
 #include "opencv2/core.hpp"
-
-#include "cv.h"
 #include "cascadeclassifier.h"
 
 using namespace std;
index dfba7a3..c8f024b 100644 (file)
@@ -2,9 +2,6 @@
 #define _OPENCV_FEATURES_H_
 
 #include "imagestorage.h"
-#include "cxcore.h"
-#include "cv.h"
-#include "ml.h"
 #include <stdio.h>
 
 #define FEATURES "features"
index 3aa329d..ee54eab 100644 (file)
@@ -135,7 +135,7 @@ public:
     virtual Mat getCatMap() const = 0;
     
     virtual void setTrainTestSplit(int count, bool shuffle=true) = 0;
-    virtual void setTrainTestSplitRatio(float ratio, bool shuffle=true) = 0;
+    virtual void setTrainTestSplitRatio(double ratio, bool shuffle=true) = 0;
     virtual void shuffleTrainTest() = 0;
 
     static Mat getSubVector(const Mat& vec, const Mat& idx);
@@ -156,7 +156,6 @@ class CV_EXPORTS_W StatModel : public Algorithm
 {
 public:
     enum { UPDATE_MODEL = 1, RAW_OUTPUT=1, COMPRESSED_INPUT=2, PREPROCESSED_INPUT=4 };
-    virtual ~StatModel();
     virtual void clear();
 
     virtual int getVarCount() const = 0;
@@ -164,16 +163,30 @@ public:
     virtual bool isTrained() const = 0;
     virtual bool isClassifier() const = 0;
 
-    virtual bool train( const Ptr<TrainData>& trainData, int flags=0 ) = 0;
+    virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );
+    virtual bool train( InputArray samples, int layout, InputArray responses );
     virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;
     virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0;
 
     template<typename _Tp> static Ptr<_Tp> load(const String& filename)
     {
         FileStorage fs(filename, FileStorage::READ);
-        Ptr<_Tp> p = _Tp::create();
-        p->read(fs.getFirstTopLevelNode());
-        return p->isTrained() ? p : Ptr<_Tp>();
+        Ptr<_Tp> model = _Tp::create();
+        model->read(fs.getFirstTopLevelNode());
+        return model->isTrained() ? model : Ptr<_Tp>();
+    }
+
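+    // convenience wrappers: create a model with the given parameters and train it in one call;
+    // for example, given some Ptr<TrainData> tdata:
+    //   Ptr<KNearest> knn = StatModel::train<KNearest>(tdata, KNearest::Params());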
+    template<typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, const typename _Tp::Params& p, int flags=0)
+    {
+        Ptr<_Tp> model = _Tp::create(p);
+        return !model.empty() && model->train(data, flags) ? model : Ptr<_Tp>();
+    }
+
+    template<typename _Tp> static Ptr<_Tp> train(InputArray samples, int layout, InputArray responses,
+                                                 const typename _Tp::Params& p, int flags=0)
+    {
+        Ptr<_Tp> model = _Tp::create(p);
+        return !model.empty() && model->train(TrainData::create(samples, layout, responses), flags) ? model : Ptr<_Tp>();
     }
 
     virtual void save(const String& filename) const;
@@ -192,11 +205,17 @@ public:
 class CV_EXPORTS_W NormalBayesClassifier : public StatModel
 {
 public:
-    virtual ~NormalBayesClassifier();
+    class CV_EXPORTS_W_MAP Params
+    {
+    public:
+        Params();
+    };
     virtual float predictProb( InputArray inputs, OutputArray outputs,
                                OutputArray outputProbs, int flags=0 ) const = 0;
+    virtual void setParams(const Params& params) = 0;
+    virtual Params getParams() const = 0;
 
-    static Ptr<NormalBayesClassifier> create();
+    static Ptr<NormalBayesClassifier> create(const Params& params=Params());
 };
 
 /****************************************************************************************\
@@ -207,13 +226,21 @@ public:
 class CV_EXPORTS_W KNearest : public StatModel
 {
 public:
-    virtual void setDefaultK(int k) = 0;
-    virtual int getDefaultK() const = 0;
+    class CV_EXPORTS_W_MAP Params
+    {
+    public:
+        Params(int defaultK=10, bool isclassifier=true);
+
+        int defaultK;
+        bool isclassifier;
+    };
+    virtual void setParams(const Params& p) = 0;
+    virtual Params getParams() const = 0;
     virtual float findNearest( InputArray samples, int k,
                                OutputArray results,
                                OutputArray neighborResponses=noArray(),
                                OutputArray dist=noArray() ) const = 0;
-    static Ptr<KNearest> create(bool isclassifier=true);
+    static Ptr<KNearest> create(const Params& params=Params());
 };
 
 /****************************************************************************************\
@@ -247,7 +274,6 @@ public:
     class CV_EXPORTS Kernel : public Algorithm
     {
     public:
-        virtual ~Kernel();
         virtual int getType() const = 0;
         virtual void calc( int vcount, int n, const float* vecs, const float* another, float* results ) = 0;
     };
@@ -261,8 +287,6 @@ public:
     // SVM params type
     enum { C=0, GAMMA=1, P=2, NU=3, COEF=4, DEGREE=5 };
 
-    virtual ~SVM();
-
     virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
                     ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C),
                     ParamGrid gammaGrid  = SVM::getDefaultGrid(SVM::GAMMA),
@@ -399,8 +423,6 @@ public:
         int subsetOfs;
     };
 
-    virtual ~DTrees();
-
     virtual void setDParams(const Params& p);
     virtual Params getDParams() const;
 
@@ -464,7 +486,6 @@ public:
     // Boosting type
     enum { DISCRETE=0, REAL=1, LOGIT=2, GENTLE=3 };
 
-    virtual ~Boost();
     virtual Params getBParams() const = 0;
     virtual void setBParams(const Params& p) = 0;
 
@@ -491,7 +512,6 @@ public:
     };
 
     enum {SQUARED_LOSS=0, ABSOLUTE_LOSS, HUBER_LOSS=3, DEVIANCE_LOSS};
-    virtual ~GBTrees();
 
     virtual void setK(int k) = 0;
 
@@ -513,10 +533,16 @@ public:
     struct CV_EXPORTS_W_MAP Params
     {
         Params();
-        Params( TermCriteria termCrit, int trainMethod, double param1, double param2=0 );
+        Params( const Mat& layerSizes, int activateFunc, double fparam1, double fparam2,
+                TermCriteria termCrit, int trainMethod, double param1, double param2=0 );
 
         enum { BACKPROP=0, RPROP=1 };
 
+        CV_PROP_RW Mat layerSizes;
+        CV_PROP_RW int activateFunc;
+        CV_PROP_RW double fparam1;
+        CV_PROP_RW double fparam2;
+
         CV_PROP_RW TermCriteria termCrit;
         CV_PROP_RW int trainMethod;
 
@@ -527,23 +553,17 @@ public:
         CV_PROP_RW double rpDW0, rpDWPlus, rpDWMinus, rpDWMin, rpDWMax;
     };
 
-    virtual ~ANN_MLP();
-
     // possible activation functions
     enum { IDENTITY = 0, SIGMOID_SYM = 1, GAUSSIAN = 2 };
 
     // available training flags
     enum { UPDATE_WEIGHTS = 1, NO_INPUT_SCALE = 2, NO_OUTPUT_SCALE = 4 };
 
-    virtual Mat getLayerSizes() const = 0;
     virtual Mat getWeights(int layerIdx) const = 0;
     virtual void setParams(const Params& p) = 0;
     virtual Params getParams() const = 0;
 
-    static Ptr<ANN_MLP> create(InputArray layerSizes=noArray(),
-                               const Params& params=Params(),
-                               int activateFunc=ANN_MLP::SIGMOID_SYM,
-                               double fparam1=0, double fparam2=0);
+    static Ptr<ANN_MLP> create(const Params& params=Params());
 };
 
 /****************************************************************************************\
diff --git a/modules/ml/src/ann_mlp.cpp b/modules/ml/src/ann_mlp.cpp
index 8b7ec3a..9d0180f 100644 (file)
 
 namespace cv { namespace ml {
 
-ANN_MLP::~ANN_MLP() {}
-
 ANN_MLP::Params::Params()
 {
+    layerSizes = Mat();
+    activateFunc = SIGMOID_SYM;
+    fparam1 = fparam2 = 0;
     termCrit = TermCriteria( TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.01 );
     trainMethod = RPROP;
     bpDWScale = bpMomentScale = 0.1;
@@ -54,8 +55,13 @@ ANN_MLP::Params::Params()
 }
 
 
-ANN_MLP::Params::Params( TermCriteria _termCrit, int _trainMethod, double _param1, double _param2 )
+ANN_MLP::Params::Params( const Mat& _layerSizes, int _activateFunc, double _fparam1, double _fparam2,
+                         TermCriteria _termCrit, int _trainMethod, double _param1, double _param2 )
 {
+    layerSizes = _layerSizes;
+    activateFunc = _activateFunc;
+    fparam1 = _fparam1;
+    fparam2 = _fparam2;
     termCrit = _termCrit;
     trainMethod = _trainMethod;
     bpDWScale = bpMomentScale = 0.1;
@@ -95,15 +101,25 @@ public:
         clear();
     }
 
-    ANN_MLPImpl( const Mat& _layer_sizes, int _activ_func,
-                 double _f_param1, double _f_param2 )
+    ANN_MLPImpl( const Params& p )
     {
-        clear();
-        create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
+        setParams(p);
     }
 
     virtual ~ANN_MLPImpl() {}
 
+    void setParams(const Params& p)
+    {
+        params = p;
+        create( params.layerSizes );
+        set_activ_func( params.activateFunc, params.fparam1, params.fparam2 );
+    }
+
+    Params getParams() const
+    {
+        return params;
+    }
+
     void clear()
     {
         min_val = max_val = min_val1 = max_val1 = 0.;
@@ -183,16 +199,13 @@ public:
         }
     }
 
-    void create( InputArray _layer_sizes, int _activ_func,
-                 double _f_param1, double _f_param2 )
+    void create( InputArray _layer_sizes )
     {
         clear();
 
         _layer_sizes.copyTo(layer_sizes);
         int l_count = layer_count();
 
-        set_activ_func( _activ_func, _f_param1, _f_param2 );
-
         weights.resize(l_count + 2);
         max_lsize = 0;
 
@@ -665,16 +678,6 @@ public:
         calc_output_scale( outputs, flags );
     }
 
-    void setParams( const Params& _params )
-    {
-        params = _params;
-    }
-
-    Params getParams() const
-    {
-        return params;
-    }
-
     bool train( const Ptr<TrainData>& trainData, int flags )
     {
         const int MAX_ITER = 1000;
@@ -1240,7 +1243,7 @@ public:
 
         vector<int> _layer_sizes;
         fn["layer_sizes"] >> _layer_sizes;
-        create( _layer_sizes, SIGMOID_SYM, 0, 0 );
+        create( _layer_sizes );
 
         int i, l_count = layer_count();
         read_params(fn);
@@ -1307,15 +1310,9 @@ public:
 };
 
 
-Ptr<ANN_MLP> ANN_MLP::create(InputArray _layerSizes,
-                             const ANN_MLP::Params& params,
-                             int activateFunc,
-                             double fparam1, double fparam2)
+Ptr<ANN_MLP> ANN_MLP::create(const ANN_MLP::Params& params)
 {
-    Mat layerSizes = _layerSizes.getMat();
-    Ptr<ANN_MLPImpl> ann = makePtr<ANN_MLPImpl>(layerSizes, activateFunc, fparam1, fparam2);
-    ann->setParams(params);
-
+    Ptr<ANN_MLPImpl> ann = makePtr<ANN_MLPImpl>(params);
     return ann;
 }
 
diff --git a/modules/ml/src/boost.cpp b/modules/ml/src/boost.cpp
index 9a775a0..f485b6e 100644 (file)
@@ -54,8 +54,6 @@ log_ratio( double val )
 }
 
 
-Boost::~Boost() {}
-
 Boost::Params::Params()
 {
     boostType = Boost::REAL;
@@ -106,6 +104,7 @@ public:
     void startTraining( const Ptr<TrainData>& trainData, int flags )
     {
         DTreesImpl::startTraining(trainData, flags);
+        sumResult.assign(w->sidx.size(), 0.);
 
         if( bparams.boostType != Boost::DISCRETE )
         {
@@ -114,14 +113,10 @@ public:
             w->ord_responses.resize(n);
 
             double a = -1, b = 1;
-            if( bparams.boostType == Boost::REAL )
-                a = 0;
-            else if( bparams.boostType == Boost::LOGIT )
+            if( bparams.boostType == Boost::LOGIT )
             {
-                sumResult.assign(w->sidx.size(), 0.);
                 a = -2, b = 2;
             }
-
             for( i = 0; i < n; i++ )
                 w->ord_responses[i] = w->cat_responses[i] > 0 ? b : a;
         }
@@ -197,7 +192,7 @@ public:
         }
         else if( bparams.boostType == Boost::REAL )
         {
-            double p = node->value;
+            double p = (node->value+1)*0.5;
             node->value = 0.5*log_ratio(p);
         }
     }
@@ -227,7 +222,7 @@ public:
     {
         int i, n = (int)w->sidx.size();
         int nvars = (int)varIdx.size();
-        double sumw = 0.;
+        double sumw = 0., C = 1.;
         cv::AutoBuffer<double> buf(n*3 + nvars);
         double* result = buf;
         float* sbuf = (float*)(result + n*3);
@@ -261,7 +256,7 @@ public:
 
             if( sumw != 0 )
                 err /= sumw;
-            double C = -log_ratio( err );
+            C = -log_ratio( err );
             double scale = std::exp(C);
 
             sumw = 0;
@@ -289,6 +284,7 @@ public:
             for( i = 0; i < n; i++ )
             {
                 int si = w->sidx[i];
+                CV_Assert( std::abs(w->ord_responses[si]) == 1 );
                 double wval = w->sample_weights[si]*std::exp(-result[i]*w->ord_responses[si]);
                 sumw += wval;
                 w->sample_weights[si] = wval;
@@ -330,6 +326,20 @@ public:
         }
         else
             CV_Error(CV_StsNotImplemented, "Unknown boosting type");
+
+        /*if( bparams.boostType != Boost::LOGIT )
+        {
+            double err = 0;
+            for( i = 0; i < n; i++ )
+            {
+                sumResult[i] += result[i]*C;
+                if( bparams.boostType != Boost::DISCRETE )
+                    err += sumResult[i]*w->ord_responses[w->sidx[i]] < 0;
+                else
+                    err += sumResult[i]*w->cat_responses[w->sidx[i]] < 0;
+            }
+            printf("%d trees. C=%.2f, training error=%.1f%%, working set size=%d (out of %d)\n", (int)roots.size(), C, err*100./n, (int)sidx.size(), n);
+        }*/
         
         // renormalize weights
         if( sumw > FLT_EPSILON )
diff --git a/modules/ml/src/data.cpp b/modules/ml/src/data.cpp
index 07e2f26..d73cb57 100644 (file)
@@ -379,7 +379,7 @@ public:
                 tempCatOfs.push_back(ofs);
                 std::copy(labels.begin(), labels.end(), std::back_inserter(tempCatMap));
             }
-            else if( haveMissing )
+            else
             {
                 tempCatOfs.push_back(Vec2i(0, 0));
                 /*Mat missing_i = layout == ROW_SAMPLE ? missing.col(i) : missing.row(i);
@@ -741,9 +741,9 @@ public:
             CV_Error( CV_StsBadArg, "type of some variables is not specified" );
     }
 
-    void setTrainTestSplitRatio(float ratio, bool shuffle)
+    void setTrainTestSplitRatio(double ratio, bool shuffle)
     {
-        CV_Assert( 0 <= ratio && ratio <= 1 );
+        CV_Assert( 0. <= ratio && ratio <= 1. );
         setTrainTestSplit(cvRound(getNSamples()*ratio), shuffle);
     }
 
diff --git a/modules/ml/src/inner_functions.cpp b/modules/ml/src/inner_functions.cpp
index c347835..a5fd59d 100644 (file)
@@ -50,7 +50,6 @@ ParamGrid::ParamGrid(double _minVal, double _maxVal, double _logStep)
     logStep = std::max(_logStep, 1.);
 }
 
-StatModel::~StatModel() {}
 void StatModel::clear() {}
 
 int StatModel::getVarCount() const { return 0; }
@@ -61,6 +60,11 @@ bool StatModel::train( const Ptr<TrainData>&, int )
     return false;
 }
 
+bool StatModel::train( InputArray samples, int layout, InputArray responses )
+{
+    return train(TrainData::create(samples, layout, responses));
+}
+
 float StatModel::calcError( const Ptr<TrainData>& data, bool testerr, OutputArray _resp ) const
 {
     Mat samples = data->getSamples();
diff --git a/modules/ml/src/knearest.cpp b/modules/ml/src/knearest.cpp
index 6824d26..6c40545 100644 (file)
 namespace cv {
 namespace ml {
 
+KNearest::Params::Params(int k, bool isclassifier_)
+{
+    defaultK = k;
+    isclassifier = isclassifier_;
+}
+
+
 class KNearestImpl : public KNearest
 {
 public:
-    KNearestImpl(bool __isClassifier=true)
+    KNearestImpl(const Params& p)
     {
-        defaultK = 3;
-        _isClassifier = __isClassifier;
+        params = p;
     }
 
     virtual ~KNearestImpl() {}
 
-    bool isClassifier() const { return _isClassifier; }
+    Params getParams() const { return params; }
+    void setParams(const Params& p) { params = p; }
+
+    bool isClassifier() const { return params.isclassifier; }
     bool isTrained() const { return !samples.empty(); }
 
     String getDefaultModelName() const { return "opencv_ml_knn"; }
@@ -188,7 +197,7 @@ public:
 
             if( results || testidx+range.start == 0 )
             {
-                if( !_isClassifier || k == 1 )
+                if( !params.isclassifier || k == 1 )
                 {
                     float s = 0.f;
                     for( j = 0; j < k; j++ )
@@ -316,12 +325,13 @@ public:
 
     float predict(InputArray inputs, OutputArray outputs, int) const
     {
-        return findNearest( inputs, defaultK, outputs, noArray(), noArray() );
+        return findNearest( inputs, params.defaultK, outputs, noArray(), noArray() );
     }
 
     void write( FileStorage& fs ) const
     {
-        fs << "is_classifier" << (int)_isClassifier;
+        fs << "is_classifier" << (int)params.isclassifier;
+        fs << "default_k" << params.defaultK;
 
         fs << "samples" << samples;
         fs << "responses" << responses;
@@ -330,24 +340,21 @@ public:
     void read( const FileNode& fn )
     {
         clear();
-        _isClassifier = (int)fn["is_classifier"] != 0;
+        params.isclassifier = (int)fn["is_classifier"] != 0;
+        params.defaultK = (int)fn["default_k"];
 
         fn["samples"] >> samples;
         fn["responses"] >> responses;
     }
 
-    void setDefaultK(int _k) { defaultK = _k; }
-    int getDefaultK() const { return defaultK; }
-
     Mat samples;
     Mat responses;
-    bool _isClassifier;
-    int defaultK;
+    Params params;
 };
 
-Ptr<KNearest> KNearest::create(bool isClassifier)
+Ptr<KNearest> KNearest::create(const Params& p)
 {
-    return makePtr<KNearestImpl>(isClassifier);
+    return makePtr<KNearestImpl>(p);
 }
 
 }
diff --git a/modules/ml/src/nbayes.cpp b/modules/ml/src/nbayes.cpp
index afa138b..1e2d710 100644 (file)
@@ -43,7 +43,7 @@
 namespace cv {
 namespace ml {
 
-NormalBayesClassifier::~NormalBayesClassifier() {}
+NormalBayesClassifier::Params::Params() {}
 
 class NormalBayesClassifierImpl : public NormalBayesClassifier
 {
@@ -53,6 +53,9 @@ public:
         nallvars = 0;
     }
 
+    void setParams(const Params&) {}
+    Params getParams() const { return Params(); }
+
     bool train( const Ptr<TrainData>& trainData, int flags )
     {
         const float min_variation = FLT_EPSILON;
@@ -452,7 +455,7 @@ public:
 };
 
 
-Ptr<NormalBayesClassifier> NormalBayesClassifier::create()
+Ptr<NormalBayesClassifier> NormalBayesClassifier::create(const Params&)
 {
     Ptr<NormalBayesClassifierImpl> p = makePtr<NormalBayesClassifierImpl>();
     return p;
diff --git a/modules/ml/src/svm.cpp b/modules/ml/src/svm.cpp
index 7715bee..a638b6b 100644 (file)
@@ -134,8 +134,6 @@ SVM::Params::Params( int _svmType, int _kernelType,
     termCrit = _termCrit;
 }
 
-SVM::Kernel::~Kernel() {}
-
 /////////////////////////////////////// SVM kernel ///////////////////////////////////////
 class SVMKernelImpl : public SVM::Kernel
 {
@@ -358,7 +356,51 @@ static void sortSamplesByClasses( const Mat& _samples, const Mat& _responses,
     
 //////////////////////// SVM implementation //////////////////////////////
 
-SVM::~SVM() {}
+ParamGrid SVM::getDefaultGrid( int param_id )
+{
+    ParamGrid grid;
+    if( param_id == SVM::C )
+    {
+        grid.minVal = 0.1;
+        grid.maxVal = 500;
+        grid.logStep = 5; // total iterations = 5
+    }
+    else if( param_id == SVM::GAMMA )
+    {
+        grid.minVal = 1e-5;
+        grid.maxVal = 0.6;
+        grid.logStep = 15; // total iterations = 4
+    }
+    else if( param_id == SVM::P )
+    {
+        grid.minVal = 0.01;
+        grid.maxVal = 100;
+        grid.logStep = 7; // total iterations = 4
+    }
+    else if( param_id == SVM::NU )
+    {
+        grid.minVal = 0.01;
+        grid.maxVal = 0.2;
+        grid.logStep = 3; // total iterations = 3
+    }
+    else if( param_id == SVM::COEF )
+    {
+        grid.minVal = 0.1;
+        grid.maxVal = 300;
+        grid.logStep = 14; // total iterations = 3
+    }
+    else if( param_id == SVM::DEGREE )
+    {
+        grid.minVal = 0.01;
+        grid.maxVal = 4;
+        grid.logStep = 7; // total iterations = 3
+    }
+    else
+        cvError( CV_StsBadArg, "SVM::getDefaultGrid", "Invalid type of parameter "
+                "(use one of SVM::C, SVM::GAMMA et al.)", __FILE__, __LINE__ );
+    return grid;
+}
+
 
 class SVMImpl : public SVM
 {
@@ -371,52 +413,6 @@ public:
         int ofs;
     };
 
-    virtual ParamGrid getDefaultGrid( int param_id ) const
-    {
-        ParamGrid grid;
-        if( param_id == SVM::C )
-        {
-            grid.minVal = 0.1;
-            grid.maxVal = 500;
-            grid.logStep = 5; // total iterations = 5
-        }
-        else if( param_id == SVM::GAMMA )
-        {
-            grid.minVal = 1e-5;
-            grid.maxVal = 0.6;
-            grid.logStep = 15; // total iterations = 4
-        }
-        else if( param_id == SVM::P )
-        {
-            grid.minVal = 0.01;
-            grid.maxVal = 100;
-            grid.logStep = 7; // total iterations = 4
-        }
-        else if( param_id == SVM::NU )
-        {
-            grid.minVal = 0.01;
-            grid.maxVal = 0.2;
-            grid.logStep = 3; // total iterations = 3
-        }
-        else if( param_id == SVM::COEF )
-        {
-            grid.minVal = 0.1;
-            grid.maxVal = 300;
-            grid.logStep = 14; // total iterations = 3
-        }
-        else if( param_id == SVM::DEGREE )
-        {
-            grid.minVal = 0.01;
-            grid.maxVal = 4;
-            grid.logStep = 7; // total iterations = 3
-        }
-        else
-            cvError( CV_StsBadArg, "SVM::getDefaultGrid", "Invalid type of parameter "
-                     "(use one of SVM::C, SVM::GAMMA et al.)", __FILE__, __LINE__ );
-        return grid;
-    }
-
-
     // Generalized SMO+SVMlight algorithm
     // Solves:
     //
@@ -1568,6 +1564,9 @@ public:
         if( svmType == C_SVC || svmType == NU_SVC )
         {
             responses = data->getTrainNormCatResponses();
+            if( responses.empty() )
+                CV_Error(CV_StsBadArg, "in the case of classification problem the responses must be categorical; "
+                                       "either specify varType when creating TrainData, or pass integer responses");
             class_labels = data->getClassLabels();
         }
         else
@@ -1793,7 +1792,7 @@ public:
         {
             int svmType = svm->params.svmType;
             int sv_total = svm->sv.rows;
-            int class_count = !svm->class_labels.empty() ? svm->class_labels.cols : svmType == ONE_CLASS ? 1 : 0;
+            int class_count = !svm->class_labels.empty() ? (int)svm->class_labels.total() : svmType == ONE_CLASS ? 1 : 0;
 
             AutoBuffer<float> _buffer(sv_total + (class_count+1)*2);
             float* buffer = _buffer;
diff --git a/modules/ml/src/tree.cpp b/modules/ml/src/tree.cpp
index 2985f3f..ae05b81 100644 (file)
@@ -48,8 +48,6 @@ namespace ml {
 
 using std::vector;
 
-DTrees::~DTrees() {}
-
 void DTrees::setDParams(const DTrees::Params&)
 {
     CV_Error(CV_StsNotImplemented, "");
diff --git a/modules/ml/test/test_emknearestkmeans.cpp b/modules/ml/test/test_emknearestkmeans.cpp
index 5e65fdb..98b88c7 100644 (file)
@@ -313,7 +313,7 @@ void CV_KNearestTest::run( int /*start_from*/ )
 
     int code = cvtest::TS::OK;
     Ptr<KNearest> knearest = KNearest::create(true);
-    knearest->train(TrainData::create(trainData, cv::ml::ROW_SAMPLE, trainLabels), 0);;
+    knearest->train(trainData, cv::ml::ROW_SAMPLE, trainLabels);
     knearest->findNearest( testData, 4, bestLabels);
     float err;
     if( !calcErr( bestLabels, testLabels, sizes, err, true ) )
diff --git a/modules/ml/test/test_mltests2.cpp b/modules/ml/test/test_mltests2.cpp
index 7a116f5..b7c5f46 100644 (file)
@@ -371,8 +371,9 @@ int CV_MLBaseTest::train( int testCaseIdx )
                                  data->getVarIdx(), data->getTrainSampleIdx());
         int layer_sz[] = { data->getNAllVars(), 100, 100, (int)cls_map.size() };
         Mat layer_sizes( 1, (int)(sizeof(layer_sz)/sizeof(layer_sz[0])), CV_32S, layer_sz );
-        model = ANN_MLP::create(layer_sizes, ANN_MLP::Params(TermCriteria(TermCriteria::COUNT,300,0.01),
-                                                        str_to_ann_train_method(train_method_str), param1, param2));
+        model = ANN_MLP::create(ANN_MLP::Params(layer_sizes, ANN_MLP::SIGMOID_SYM, 0, 0,
+                                                TermCriteria(TermCriteria::COUNT,300,0.01),
+                                                str_to_ann_train_method(train_method_str), param1, param2));
     }
     else if( modelName == CV_DTREE )
     {
diff --git a/samples/cpp/agaricus-lepiota.data b/samples/cpp/agaricus-lepiota.data
deleted file mode 100644 (file)
index 14fe8bb..0000000
--- a/samples/cpp/agaricus-lepiota.data
+++ /dev/null
@@ -1,8124 +0,0 @@
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,f,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,f,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,f,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u
-e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,f,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p
-p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p
-e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p
-e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g
-p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m
-e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g
-e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p
-e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g
-e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d
-e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u
-p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g
-p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g
-e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m
-e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u
-e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m
-e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g
-e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u
-e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d
-p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,x,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d
-p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-e,f,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,b,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d
-p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-e,f,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,k,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-e,f,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,k,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,x,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,k,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,x,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,b,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d
-e,x,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,x,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,x,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,k,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-e,f,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-e,f,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-e,x,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,x,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-e,f,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d
-e,x,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,f,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-e,f,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,f,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,f,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-e,x,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d
-p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-e,f,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,k,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,x,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,b,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,k,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,k,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,b,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d
-p,b,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,f,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-e,k,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,k,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,f,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,b,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,f,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,x,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-e,f,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,f,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-e,k,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,x,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-e,x,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,k,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,k,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-e,f,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,x,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,k,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,f,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-e,f,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-e,x,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,b,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,f,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,x,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,k,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,f,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-e,k,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p
-e,x,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-e,x,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d
-e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,k,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g
-e,k,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,b,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,k,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-e,k,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,f,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-e,f,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-e,k,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-e,k,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,c,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,b,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,b,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,k,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,k,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-e,x,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,f,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,x,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,f,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,f,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-e,x,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-e,k,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,x,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-e,x,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,x,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,f,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,b,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,x,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-e,f,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d
-p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-e,f,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,x,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,k,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,k,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-e,k,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,f,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,k,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,k,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d
-e,x,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,x,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,b,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,k,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,k,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,x,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,k,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-e,k,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,k,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-e,k,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,b,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-e,k,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,b,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-e,k,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,k,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-e,f,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,k,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,x,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,k,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,k,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-e,k,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,f,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-e,x,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-e,x,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-e,f,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d
-e,x,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-e,x,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-e,x,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,f,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-e,k,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,f,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,b,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,f,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,f,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,b,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,x,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,f,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,k,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,x,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p
-e,x,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-e,f,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-e,x,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-e,x,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-e,f,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g
-e,k,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,b,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,x,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-e,f,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,x,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-e,f,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,x,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-e,f,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-e,k,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,k,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-e,k,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,b,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,b,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,b,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p
-p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,x,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g
-e,f,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,f,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d
-e,f,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,b,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,f,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-e,x,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,x,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,x,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-e,k,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,k,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,b,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g
-p,f,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-e,k,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-e,k,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-e,x,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d
-e,k,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-e,f,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,k,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,k,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,c,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,x,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,k,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p
-e,x,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,b,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,f,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,f,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,f,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,x,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-e,x,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g
-e,k,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-e,k,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-e,k,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,b,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-e,f,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-e,f,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-e,x,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p
-e,x,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,f,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-e,f,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,b,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,b,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,x,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,f,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-e,x,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,b,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,x,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-e,f,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-e,x,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-e,x,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,f,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,k,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,k,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g
-e,f,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,b,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-e,f,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-e,x,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,k,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,k,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p
-e,x,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-e,x,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,b,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,k,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p
-e,k,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,f,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-e,f,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p
-e,k,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,x,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,x,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-e,f,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g
-p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u
-e,k,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,k,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,f,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,f,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p
-p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-e,x,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-e,f,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,x,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d
-e,f,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,k,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,f,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,k,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,f,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,f,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p
-e,f,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,b,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,k,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-e,f,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-e,x,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,k,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l
-p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,k,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g
-e,f,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,b,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,f,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g
-e,x,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-e,k,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,x,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-e,k,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-e,k,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p
-p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,k,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g
-p,b,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,b,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-e,x,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-e,x,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u
-e,f,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,x,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,k,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u
-e,f,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,b,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-e,x,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u
-e,x,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-e,x,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g
-e,f,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,b,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u
-p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p
-p,b,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,f,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l
-p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g
-p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-e,f,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-e,f,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,k,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,x,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p
-p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d
-e,f,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g
-e,k,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g
-p,f,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,f,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-e,x,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,x,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d
-p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g
-e,f,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g
-e,k,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g
-p,b,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,b,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d
-e,x,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,k,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-e,x,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m
-e,f,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,f,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u
-e,x,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,x,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p
-p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,b,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,k,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p
-e,x,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,k,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,x,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d
-p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u
-e,f,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-e,x,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d
-p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u
-e,x,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,x,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w
-p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p
-p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p
-p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u
-e,k,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,x,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g
-e,f,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p
-p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p
-p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p
-p,b,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,b,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,x,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,b,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u
-e,f,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,k,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-e,f,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,b,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u
-p,b,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-e,f,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-e,x,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l
-p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p
-p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g
-e,k,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w
-p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p
-p,b,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d
-p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d
-p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g
-p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,x,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w
-e,f,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,f,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,k,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d
-p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g
-p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d
-p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d
-p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g
-e,x,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m
-p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g
-p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u
-e,k,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g
-e,x,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d
-e,f,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u
-p,f,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d
-p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g
-p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g
-p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g
-p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u
-p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d
-p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,b,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g
-p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,k,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w
-e,x,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d
-p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u
-p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,x,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,b,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,k,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,k,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,b,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,x,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,k,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,b,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,x,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,k,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,x,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,k,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,x,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,b,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,b,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-e,b,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,x,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,b,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,f,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,k,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,b,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,x,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,b,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,k,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,k,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,x,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,k,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,k,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,x,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,b,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,k,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,x,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,k,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,f,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,k,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,k,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,x,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-e,k,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,x,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,k,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-e,f,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,b,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,f,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-e,b,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-e,x,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,k,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l
-e,x,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,b,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,f,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,k,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,b,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,f,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,b,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,k,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,f,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,b,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,k,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,b,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,x,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,k,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,k,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,b,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l
-e,b,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l
-e,b,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,k,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,b,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,k,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,k,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,k,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,x,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,x,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-e,f,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-e,k,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,x,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,k,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,b,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,x,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,x,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-e,b,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,x,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,b,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,b,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,b,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,k,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-e,b,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,k,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,b,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,k,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,f,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,x,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,f,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,k,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,b,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,b,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,c,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,x,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-e,x,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l
-p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,f,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,x,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,k,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,k,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-e,x,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l
-e,x,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l
-e,b,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,x,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,f,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,f,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,x,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,x,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,x,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,x,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l
-e,b,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,b,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,x,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-e,k,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,x,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,b,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,b,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,k,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,x,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,x,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,x,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,x,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,k,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,k,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,k,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,b,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,b,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,b,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,x,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,b,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-e,x,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,k,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,b,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,b,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,k,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,x,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,b,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,x,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,b,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,b,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,k,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,x,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,k,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,b,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l
-e,k,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,x,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,f,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,b,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,x,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,k,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,f,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-e,f,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,x,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,c,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,b,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,b,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,x,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,b,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,x,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,x,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l
-e,k,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,k,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,k,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l
-e,x,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,b,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,x,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,k,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,x,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,b,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,b,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l
-e,b,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,k,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,f,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-e,k,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,x,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,b,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l
-e,x,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p
-e,x,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,b,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-e,x,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,k,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-e,b,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l
-e,x,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,x,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,x,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l
-e,b,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,x,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-e,k,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,k,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,x,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,x,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l
-e,x,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,x,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-e,f,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,f,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l
-e,b,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l
-e,x,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,x,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l
-p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-e,k,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,f,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l
-e,k,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l
-e,f,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,k,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l
-e,x,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,f,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,b,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,k,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l
-e,k,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l
-e,b,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,x,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-e,b,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g
-p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,b,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,b,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,b,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d
-p,k,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-e,x,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,b,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l
-p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-e,k,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,k,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l
-p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l
-e,x,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p
-e,x,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d
-p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l
-e,k,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g
-e,k,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p
-e,b,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,x,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,b,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d
-p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,x,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,k,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,b,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d
-e,x,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p
-p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,b,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d
-e,x,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l
-e,b,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l
-p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p
-p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-e,k,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l
-p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l
-p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,b,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l
-e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l
-e,b,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g
-p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p
-p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p
-p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l
-e,b,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,k,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l
-p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p
-e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l
-p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d
-p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p
-p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p
-p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d
-e,b,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,x,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,k,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g
-p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d
-e,k,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g
-e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l
-e,b,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l
-e,k,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g
-e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l
-p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d
-p,f,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l
-p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l
-p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d
-e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l
-e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l
-p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l
-e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l
diff --git a/samples/cpp/bagofwords_classification.cpp b/samples/cpp/bagofwords_classification.cpp
index 320acf3..1c50a0e 100644 (file)
@@ -2326,14 +2326,14 @@ static void removeBowImageDescriptorsByCount( vector<ObdImage>& images, vector<M
     CV_Assert( bowImageDescriptors.size() == objectPresent.size() );
 }
 
-static void setSVMParams( const SVM::Params& svmParams, Mat& class_wts_cv, const Mat& responses, bool balanceClasses )
+static void setSVMParams( SVM::Params& svmParams, Mat& class_wts_cv, const Mat& responses, bool balanceClasses )
 {
     int pos_ex = countNonZero(responses == 1);
     int neg_ex = countNonZero(responses == -1);
     cout << pos_ex << " positive training samples; " << neg_ex << " negative training samples" << endl;
 
-    svmParams.svm_type = CvSVM::C_SVC;
-    svmParams.kernel_type = CvSVM::RBF;
+    svmParams.svmType = SVM::C_SVC;
+    svmParams.kernelType = SVM::RBF;
     if( balanceClasses )
     {
         Mat class_wts( 2, 1, CV_32FC1 );
@@ -2351,43 +2351,44 @@ static void setSVMParams( const SVM::Params& svmParams, Mat& class_wts_cv, const
             class_wts.at<float>(1) = static_cast<float>(pos_ex)/static_cast<float>(pos_ex+neg_ex);
         }
         class_wts_cv = class_wts;
-        svmParams.class_weights = &class_wts_cv;
+        svmParams.classWeights = class_wts_cv;
     }
 }
 
-static void setSVMTrainAutoParams( CvParamGrid& c_grid, CvParamGrid& gamma_grid,
-                            CvParamGrid& p_grid, CvParamGrid& nu_grid,
-                            CvParamGrid& coef_grid, CvParamGrid& degree_grid )
+static void setSVMTrainAutoParams( ParamGrid& c_grid, ParamGrid& gamma_grid,
+                            ParamGrid& p_grid, ParamGrid& nu_grid,
+                            ParamGrid& coef_grid, ParamGrid& degree_grid )
 {
-    c_grid = CvSVM::get_default_grid(CvSVM::C);
+    c_grid = SVM::getDefaultGrid(SVM::C);
 
-    gamma_grid = CvSVM::get_default_grid(CvSVM::GAMMA);
+    gamma_grid = SVM::getDefaultGrid(SVM::GAMMA);
 
-    p_grid = CvSVM::get_default_grid(CvSVM::P);
-    p_grid.step = 0;
+    p_grid = SVM::getDefaultGrid(SVM::P);
+    p_grid.logStep = 0;
 
-    nu_grid = CvSVM::get_default_grid(CvSVM::NU);
-    nu_grid.step = 0;
+    nu_grid = SVM::getDefaultGrid(SVM::NU);
+    nu_grid.logStep = 0;
 
-    coef_grid = CvSVM::get_default_grid(CvSVM::COEF);
-    coef_grid.step = 0;
+    coef_grid = SVM::getDefaultGrid(SVM::COEF);
+    coef_grid.logStep = 0;
 
-    degree_grid = CvSVM::get_default_grid(CvSVM::DEGREE);
-    degree_grid.step = 0;
+    degree_grid = SVM::getDefaultGrid(SVM::DEGREE);
+    degree_grid.logStep = 0;
 }
 
-static void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsExt, const string& objClassName, VocData& vocData,
+static Ptr<SVM> trainSVMClassifier( const SVMTrainParamsExt& svmParamsExt, const string& objClassName, VocData& vocData,
                          Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
                          const string& resPath )
 {
     /* first check if a previously trained svm for the current class has been saved to file */
     string svmFilename = resPath + svmsDir + "/" + objClassName + ".xml.gz";
+    Ptr<SVM> svm;
 
     FileStorage fs( svmFilename, FileStorage::READ);
     if( fs.isOpened() )
     {
         cout << "*** LOADING SVM CLASSIFIER FOR CLASS " << objClassName << " ***" << endl;
-        svm.load( svmFilename.c_str() );
+        svm = StatModel::load<SVM>( svmFilename );
     }
     else
     {
@@ -2438,20 +2439,24 @@ static void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsEx
         }
 
         cout << "TRAINING SVM FOR CLASS ..." << objClassName << "..." << endl;
-        CvSVMParams svmParams;
-        CvMat class_wts_cv;
+        SVM::Params svmParams;
+        Mat class_wts_cv;
         setSVMParams( svmParams, class_wts_cv, responses, svmParamsExt.balanceClasses );
-        CvParamGrid c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid;
+        svm = SVM::create(svmParams);
+        ParamGrid c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid;
         setSVMTrainAutoParams( c_grid, gamma_grid,  p_grid, nu_grid, coef_grid, degree_grid );
-        svm.train_auto( trainData, responses, Mat(), Mat(), svmParams, 10, c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid );
+
+        svm->trainAuto(TrainData::create(trainData, ROW_SAMPLE, responses), 10,
+                       c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid);
         cout << "SVM TRAINING FOR CLASS " << objClassName << " COMPLETED" << endl;
 
-        svm.save( svmFilename.c_str() );
+        svm->save( svmFilename );
         cout << "SAVED CLASSIFIER TO FILE" << endl;
     }
+    return svm;
 }
 
-static void computeConfidences( CvSVM& svm, const string& objClassName, VocData& vocData,
+static void computeConfidences( const Ptr<SVM>& svm, const string& objClassName, VocData& vocData,
                          Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
                          const string& resPath )
 {
@@ -2477,12 +2482,12 @@ static void computeConfidences( CvSVM& svm, const string& objClassName, VocData&
         if( imageIdx == 0 )
         {
             // In the first iteration, determine the sign of the positive class
-            float classVal = confidences[imageIdx] = svm.predict( bowImageDescriptors[imageIdx], false );
-            float scoreVal = confidences[imageIdx] = svm.predict( bowImageDescriptors[imageIdx], true );
+            float classVal = confidences[imageIdx] = svm->predict( bowImageDescriptors[imageIdx], noArray(), 0 );
+            float scoreVal = confidences[imageIdx] = svm->predict( bowImageDescriptors[imageIdx], noArray(), StatModel::RAW_OUTPUT );
             signMul = (classVal < 0) == (scoreVal < 0) ? 1.f : -1.f;
         }
         // svm output of decision function
-        confidences[imageIdx] = signMul * svm.predict( bowImageDescriptors[imageIdx], true );
+        confidences[imageIdx] = signMul * svm->predict( bowImageDescriptors[imageIdx], noArray(), StatModel::RAW_OUTPUT );
     }
 
     cout << "WRITING QUERY RESULTS TO VOC RESULTS FILE FOR CLASS " << objClassName << "..." << endl;
@@ -2592,9 +2597,8 @@ int main(int argc, char** argv)
     for( size_t classIdx = 0; classIdx < objClasses.size(); ++classIdx )
     {
         // Train a classifier on train dataset
-        CvSVM svm;
-        trainSVMClassifier( svm, svmTrainParamsExt, objClasses[classIdx], vocData,
-                            bowExtractor, featureDetector, resPath );
+        Ptr<SVM> svm = trainSVMClassifier( svmTrainParamsExt, objClasses[classIdx], vocData,
+                                           bowExtractor, featureDetector, resPath );
 
         // Now use the classifier over all images on the test dataset and rank according to score order
         // also calculating precision-recall etc.
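
Not part of the patch: a minimal standalone sketch of the training flow the hunks above migrate to. It assumes only the cv::ml calls visible in the added lines (SVM::Params, SVM::getDefaultGrid, trainAuto, predict with StatModel::RAW_OUTPUT); the toy data and parameter values are illustrative.

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Toy 2-class data standing in for the BOW descriptors (values are illustrative).
    const int N = 100;
    Mat samples(N, 2, CV_32F), responses(N, 1, CV_32S);
    RNG rng(12345);
    for( int i = 0; i < N; i++ )
    {
        int cls = i < N/2 ? 1 : -1;
        samples.at<float>(i, 0) = (float)rng.gaussian(0.3) + (cls > 0 ? 1.f : -1.f);
        samples.at<float>(i, 1) = (float)rng.gaussian(0.3) + (cls > 0 ? 1.f : -1.f);
        responses.at<int>(i) = cls;
    }

    SVM::Params params;
    params.svmType = SVM::C_SVC;
    params.kernelType = SVM::RBF;
    Ptr<SVM> svm = SVM::create(params);

    // trainAuto() cross-validates over the parameter grids; setting logStep = 0
    // freezes a parameter at its initial value, as the rewritten
    // setSVMTrainAutoParams() does for P, NU, COEF and DEGREE.
    ParamGrid c_grid      = SVM::getDefaultGrid(SVM::C);
    ParamGrid gamma_grid  = SVM::getDefaultGrid(SVM::GAMMA);
    ParamGrid p_grid      = SVM::getDefaultGrid(SVM::P);      p_grid.logStep = 0;
    ParamGrid nu_grid     = SVM::getDefaultGrid(SVM::NU);     nu_grid.logStep = 0;
    ParamGrid coef_grid   = SVM::getDefaultGrid(SVM::COEF);   coef_grid.logStep = 0;
    ParamGrid degree_grid = SVM::getDefaultGrid(SVM::DEGREE); degree_grid.logStep = 0;

    svm->trainAuto(TrainData::create(samples, ROW_SAMPLE, responses), 10,
                   c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid);

    // predict() returns the class label by default; StatModel::RAW_OUTPUT returns
    // the decision-function value, whose sign convention is what the signMul
    // correction in computeConfidences() compensates for.
    Mat query = (Mat_<float>(1, 2) << 0.9f, 1.1f);
    float label = svm->predict(query);
    float score = svm->predict(query, noArray(), StatModel::RAW_OUTPUT);
    std::cout << "label = " << label << ", raw score = " << score << std::endl;
    return 0;
}
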
diff --git a/samples/cpp/letter_recog.cpp b/samples/cpp/letter_recog.cpp
index a07aae8..4076b63 100644 (file)
@@ -179,10 +179,7 @@ build_rtrees_classifier( const string& data_filename,
         // create classifier by using <data> and <responses>
         cout << "Training the classifier ...\n";
         Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
-
-        // 3. train classifier
-        model = RTrees::create(RTrees::Params(10,10,0,false,15,Mat(),true,4,TC(100,0.01f)));
-        model->train( tdata );
+        model = StatModel::train<RTrees>(tdata, RTrees::Params(10,10,0,false,15,Mat(),true,4,TC(100,0.01f)));
         cout << endl;
     }
 
@@ -267,10 +264,12 @@ build_boost_classifier( const string& data_filename,
 
         Ptr<TrainData> tdata = TrainData::create(new_data, ROW_SAMPLE, new_responses,
                                                  noArray(), noArray(), noArray(), var_type);
-        model = Boost::create(Boost::Params(Boost::REAL, 100, 0.95, 5, false, Mat() ));
+        vector<double> priors(2);
+        priors[0] = 1;
+        priors[1] = 26;
 
         cout << "Training the classifier (may take a few minutes)...\n";
-        model->train(tdata);
+        model = StatModel::train<Boost>(tdata, Boost::Params(Boost::GENTLE, 100, 0.95, 5, false, Mat(priors) ));
         cout << endl;
     }
 
@@ -333,7 +332,6 @@ build_mlp_classifier( const string& data_filename,
     if( !ok )
         return ok;
 
-    int i, j;
     Ptr<ANN_MLP> model;
 
     int nsamples_all = data.rows;
@@ -360,14 +358,14 @@ build_mlp_classifier( const string& data_filename,
         // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
         Mat train_data = data.rowRange(0, ntrain_samples);
-        Mat new_responses = Mat::zeros( ntrain_samples, class_count, CV_32F );
+        Mat train_responses = Mat::zeros( ntrain_samples, class_count, CV_32F );
 
         // 1. unroll the responses
         cout << "Unrolling the responses...\n";
-        for( i = 0; i < ntrain_samples; i++ )
+        for( int i = 0; i < ntrain_samples; i++ )
         {
-            int cls_label = responses.at<int>(i) - 'A'
-            new_responses.at<float>(i, cls_label) = 1.f;
+            int cls_label = responses.at<int>(i) - 'A';
+            train_responses.at<float>(i, cls_label) = 1.f;
         }
 
         // 2. train classifier
@@ -385,180 +383,63 @@ build_mlp_classifier( const string& data_filename,
         int max_iter = 1000;
 #endif
 
-        mlp.train( &train_data, new_responses, 0, 0,
-                  ANN_MLP::Params(TC(max_iter,0), method, method_param));
-
-
-        model = ANN_MLP::create() mlp.create( &layer_sizes );
-        printf( "Training the classifier (may take a few minutes)...\n");
-
-        cvReleaseMat( &new_responses );
-        printf("\n");
-    }
-
-    Mat mlp_response;
-
-    // compute prediction error on train and test data
-    for( i = 0; i < nsamples_all; i++ )
-    {
-        int best_class;
-        CvMat sample;
-        cvGetRow( data, &sample, i );
-        CvPoint max_loc;
-        mlp.predict( &sample, mlp_response );
-        cvMinMaxLoc( mlp_response, 0, 0, 0, &max_loc, 0 );
-        best_class = max_loc.x + 'A';
-
-        int r = fabs((double)best_class - responses->data.fl[i]) < FLT_EPSILON ? 1 : 0;
+        Ptr<TrainData> tdata = TrainData::create(train_data, ROW_SAMPLE, train_responses);
 
-        if( i < ntrain_samples )
-            train_hr += r;
-        else
-            test_hr += r;
+        cout << "Training the classifier (may take a few minutes)...\n";
+        model = StatModel::train<ANN_MLP>(tdata, ANN_MLP::Params(layer_sizes, ANN_MLP::SIGMOID_SYM, 0, 0, TC(max_iter,0), method, method_param));
+        cout << endl;
     }
 
-    test_hr /= (double)(nsamples_all-ntrain_samples);
-    train_hr /= (double)ntrain_samples;
-    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
-            train_hr*100., test_hr*100. );
-
-    if( !filename_to_save.empty() )
-        model->save( filename_to_save );
-
+    test_and_save_classifier(model, data, responses, ntrain_samples, 'A', filename_to_save);
     return true;
 }
 
 static bool
 build_knearest_classifier( const string& data_filename, int K )
 {
-    const int var_count = 16;
     Mat data;
-    CvMat train_data;
     Mat responses;
-
     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;
 
-    int nsamples_all = 0, ntrain_samples = 0;
-
-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.8);
-
-    // 1. unroll the responses
-    printf( "Unrolling the responses...\n");
-    cvGetRows( data, &train_data, 0, ntrain_samples );
-
-    // 2. train classifier
-    Mat train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-    for (int i = 0; i < ntrain_samples; i++)
-        train_resp->data.fl[i] = responses->data.fl[i];
-    Ptr<KNearest> model = KNearest::create(true);
-    model->train(train_data, train_resp);
+    Ptr<KNearest> model;
 
-    Mat nearests = cvCreateMat( (nsamples_all - ntrain_samples), K, CV_32FC1);
-    float* _sample = new float[var_count * (nsamples_all - ntrain_samples)];
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, _sample );
-    float* true_results = new float[nsamples_all - ntrain_samples];
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
-    knearest.find_nearest(&sample, K, result, 0, nearests, 0);
-    int true_resp = 0;
-    int accuracy = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
-        for(int k = 0; k < K; k++ )
-        {
-            if( nearests->data.fl[i * K + k] == true_results[i])
-            accuracy++;
-        }
-    }
-
-    printf("true_resp = %f%%\tavg accuracy = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100,
-                                                      (float)accuracy / (nsamples_all - ntrain_samples) / K * 100);
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);
 
-    delete[] true_results;
-    delete[] _sample;
-    cvReleaseMat( &train_resp );
-    cvReleaseMat( &nearests );
-    cvReleaseMat( &result );
-    cvReleaseMat( &data );
-    cvReleaseMat( &responses );
+    // create classifier by using <data> and <responses>
+    cout << "Training the classifier ...\n";
+    Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
+    model = StatModel::train<KNearest>(tdata, KNearest::Params(K, true));
+    cout << endl;
 
-    return 0;
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
+    return true;
 }
 
 static bool
 build_nbayes_classifier( const string& data_filename )
 {
-    const int var_count = 16;
     Mat data;
-    CvMat train_data;
     Mat responses;
-
     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;
 
-    int nsamples_all = 0, ntrain_samples = 0;
+    Ptr<NormalBayesClassifier> model;
 
-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.5);
-
-    // 1. unroll the responses
-    printf( "Unrolling the responses...\n");
-    cvGetRows( data, &train_data, 0, ntrain_samples );
-
-    // 2. train classifier
-    Mat train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-    for (int i = 0; i < ntrain_samples; i++)
-        train_resp->data.fl[i] = responses->data.fl[i];
-    CvNormalBayesClassifier nbayes(&train_data, train_resp);
-
-    float* _sample = new float[var_count * (nsamples_all - ntrain_samples)];
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, _sample );
-    float* true_results = new float[nsamples_all - ntrain_samples];
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
-    nbayes.predict(&sample, result);
-    int true_resp = 0;
-    //int accuracy = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
-    }
-
-    printf("true_resp = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100);
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);
 
-    delete[] true_results;
-    delete[] _sample;
-    cvReleaseMat( &train_resp );
-    cvReleaseMat( &result );
-    cvReleaseMat( &data );
-    cvReleaseMat( &responses );
+    // create classifier by using <data> and <responses>
+    cout << "Training the classifier ...\n";
+    Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
+    model = StatModel::train<NormalBayesClassifier>(tdata, NormalBayesClassifier::Params());
+    cout << endl;
 
-    return 0;
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
+    return true;
 }
 
 static bool
@@ -568,95 +449,47 @@ build_svm_classifier( const string& data_filename,
 {
     Mat data;
     Mat responses;
-    Mat train_resp;
-    CvMat train_data;
-    int nsamples_all = 0, ntrain_samples = 0;
-    int var_count;
-    Ptr<SVM> model;
-
     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;
 
-    ////////// SVM parameters ///////////////////////////////
-    CvSVMParams param;
-    param.kernel_type=CvSVM::LINEAR;
-    param.svm_type=CvSVM::C_SVC;
-    param.C=1;
-    ///////////////////////////////////////////////////////////
+    Ptr<SVM> model;
 
-    printf( "The database %s is loaded.\n", data_filename );
-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.1);
-    var_count = data->cols;
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);
 
     // Create or load Random Trees classifier
-    if( filename_to_load )
+    if( !filename_to_load.empty() )
     {
-        // load classifier from the specified file
-        svm.load( filename_to_load );
+        model = load_classifier<SVM>(filename_to_load);
+        if( model.empty() )
+            return false;
         ntrain_samples = 0;
-        if( svm.get_var_count() == 0 )
-        {
-            printf( "Could not read the classifier %s\n", filename_to_load );
-            return -1;
-        }
-        printf( "The classifier %s is loaded.\n", filename_to_load );
     }
     else
     {
-        // train classifier
-        printf( "Training the classifier (may take a few minutes)...\n");
-        cvGetRows( data, &train_data, 0, ntrain_samples );
-        train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-        for (int i = 0; i < ntrain_samples; i++)
-            train_resp->data.fl[i] = responses->data.fl[i];
-        svm.train(&train_data, train_resp, 0, 0, param);
-    }
-
-    // classification
-    std::vector<float> _sample(var_count * (nsamples_all - ntrain_samples));
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, &_sample[0] );
-    std::vector<float> true_results(nsamples_all - ntrain_samples);
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
+        // create classifier by using <data> and <responses>
+        cout << "Training the classifier ...\n";
+        Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
 
-    printf("Classification (may take a few minutes)...\n");
-    double t = (double)cvGetTickCount();
-    svm.predict(&sample, result);
-    t = (double)cvGetTickCount() - t;
-    printf("Prediction type: %gms\n", t/(cvGetTickFrequency()*1000.));
+        SVM::Params params;
+        params.svmType = SVM::C_SVC;
+        params.kernelType = SVM::LINEAR;
+        params.C = 1;
 
-    int true_resp = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
+        model = StatModel::train<SVM>(tdata, params);
+        cout << endl;
     }
 
-    printf("true_resp = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100);
-
-    if( !filename_to_save.empty() )
-        model->save( filename_to_save );
-
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, filename_to_save);
     return true;
 }
 
 int main( int argc, char *argv[] )
 {
-    char* filename_to_save = 0;
-    char* filename_to_load = 0;
-    char default_data_filename[] = "./letter-recognition.data";
-    char* data_filename = default_data_filename;
+    string filename_to_save = "";
+    string filename_to_load = "";
+    string data_filename = "./letter-recognition.data";
     int method = 0;
 
     int i;
@@ -685,15 +518,15 @@ int main( int argc, char *argv[] )
         {
             method = 2;
         }
-        else if ( strcmp(argv[i], "-knearest") == 0)
+        else if( strcmp(argv[i], "-knearest") == 0 || strcmp(argv[i], "-knn") == 0 )
         {
             method = 3;
         }
-        else if ( strcmp(argv[i], "-nbayes") == 0)
+        else if( strcmp(argv[i], "-nbayes") == 0)
         {
             method = 4;
         }
-        else if ( strcmp(argv[i], "-svm") == 0)
+        else if( strcmp(argv[i], "-svm") == 0)
         {
             method = 5;
         }
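
Outside the patch: all of the rewritten build_*_classifier() helpers above reduce to the same TrainData + StatModel::train<T>() pattern. A minimal sketch of that pattern, assuming only the calls visible in the added lines (KNearest is used here only because its Params are the shortest; the toy data is illustrative):

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Toy data: 6 samples with 2 features each, integer class labels (illustrative).
    float samples[12] = { 0,0,  0,1,  1,0,  5,5,  5,6,  6,5 };
    int labels[6] = { 0, 0, 0, 1, 1, 1 };
    Mat data(6, 2, CV_32F, samples), responses(6, 1, CV_32S, labels);

    // TrainData bundles samples + layout + responses, replacing the old
    // (CvMat*, CV_ROW_SAMPLE, ...) argument lists used before this commit.
    Ptr<TrainData> tdata = TrainData::create(data, ROW_SAMPLE, responses);

    // StatModel::train<T>() creates and trains a model in one call.
    Ptr<KNearest> knn = StatModel::train<KNearest>(tdata, KNearest::Params(3, true));

    Mat query = (Mat_<float>(1, 2) << 0.5f, 0.5f);
    std::cout << "predicted class: " << knn->predict(query) << std::endl;
    return 0;
}
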
diff --git a/samples/cpp/mushroom.cpp b/samples/cpp/mushroom.cpp
deleted file mode 100644 (file)
index 60eb9f0..0000000
+++ /dev/null
@@ -1,322 +0,0 @@
-#include "opencv2/core/core_c.h"
-#include "opencv2/ml/ml.hpp"
-#include <stdio.h>
-
-static void help()
-{
-    printf("\nThis program demonstrated the use of OpenCV's decision tree function for learning and predicting data\n"
-            "Usage :\n"
-            "./mushroom <path to agaricus-lepiota.data>\n"
-            "\n"
-            "The sample demonstrates how to build a decision tree for classifying mushrooms.\n"
-            "It uses the sample base agaricus-lepiota.data from UCI Repository, here is the link:\n"
-            "\n"
-            "Newman, D.J. & Hettich, S. & Blake, C.L. & Merz, C.J. (1998).\n"
-            "UCI Repository of machine learning databases\n"
-            "[http://www.ics.uci.edu/~mlearn/MLRepository.html].\n"
-            "Irvine, CA: University of California, Department of Information and Computer Science.\n"
-            "\n"
-            "// loads the mushroom database, which is a text file, containing\n"
-            "// one training sample per row, all the input variables and the output variable are categorical,\n"
-            "// the values are encoded by characters.\n\n");
-}
-
-static int mushroom_read_database( const char* filename, CvMat** data, CvMat** missing, CvMat** responses )
-{
-    const int M = 1024;
-    FILE* f = fopen( filename, "rt" );
-    CvMemStorage* storage;
-    CvSeq* seq;
-    char buf[M+2], *ptr;
-    float* el_ptr;
-    CvSeqReader reader;
-    int i, j, var_count = 0;
-
-    if( !f )
-        return 0;
-
-    // read the first line and determine the number of variables
-    if( !fgets( buf, M, f ))
-    {
-        fclose(f);
-        return 0;
-    }
-
-    for( ptr = buf; *ptr != '\0'; ptr++ )
-        var_count += *ptr == ',';
-    assert( ptr - buf == (var_count+1)*2 );
-
-    // create temporary memory storage to store the whole database
-    el_ptr = new float[var_count+1];
-    storage = cvCreateMemStorage();
-    seq = cvCreateSeq( 0, sizeof(*seq), (var_count+1)*sizeof(float), storage );
-
-    for(;;)
-    {
-        for( i = 0; i <= var_count; i++ )
-        {
-            int c = buf[i*2];
-            el_ptr[i] = c == '?' ? -1.f : (float)c;
-        }
-        if( i != var_count+1 )
-            break;
-        cvSeqPush( seq, el_ptr );
-        if( !fgets( buf, M, f ) || !strchr( buf, ',' ) )
-            break;
-    }
-    fclose(f);
-
-    // allocate the output matrices and copy the base there
-    *data = cvCreateMat( seq->total, var_count, CV_32F );
-    *missing = cvCreateMat( seq->total, var_count, CV_8U );
-    *responses = cvCreateMat( seq->total, 1, CV_32F );
-
-    cvStartReadSeq( seq, &reader );
-
-    for( i = 0; i < seq->total; i++ )
-    {
-        const float* sdata = (float*)reader.ptr + 1;
-        float* ddata = data[0]->data.fl + var_count*i;
-        float* dr = responses[0]->data.fl + i;
-        uchar* dm = missing[0]->data.ptr + var_count*i;
-
-        for( j = 0; j < var_count; j++ )
-        {
-            ddata[j] = sdata[j];
-            dm[j] = sdata[j] < 0;
-        }
-        *dr = sdata[-1];
-        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
-    }
-
-    cvReleaseMemStorage( &storage );
-    delete [] el_ptr;
-    return 1;
-}
-
-
-static CvDTree* mushroom_create_dtree( const CvMat* data, const CvMat* missing,
-                                const CvMat* responses, float p_weight )
-{
-    CvDTree* dtree;
-    CvMat* var_type;
-    int i, hr1 = 0, hr2 = 0, p_total = 0;
-    float priors[] = { 1, p_weight };
-
-    var_type = cvCreateMat( data->cols + 1, 1, CV_8U );
-    cvSet( var_type, cvScalarAll(CV_VAR_CATEGORICAL) ); // all the variables are categorical
-
-    dtree = new CvDTree;
-
-    dtree->train( data, CV_ROW_SAMPLE, responses, 0, 0, var_type, missing,
-                  CvDTreeParams( 8, // max depth
-                                 10, // min sample count
-                                 0, // regression accuracy: N/A here
-                                 true, // compute surrogate split, as we have missing data
-                                 15, // max number of categories (use sub-optimal algorithm for larger numbers)
-                                 10, // the number of cross-validation folds
-                                 true, // use 1SE rule => smaller tree
-                                 true, // throw away the pruned tree branches
-                                 priors // the array of priors, the bigger p_weight, the more attention
-                                        // to the poisonous mushrooms
-                                        // (a mushroom will be judjed to be poisonous with bigger chance)
-                                 ));
-
-    // compute hit-rate on the training database, demonstrates predict usage.
-    for( i = 0; i < data->rows; i++ )
-    {
-        CvMat sample, mask;
-        cvGetRow( data, &sample, i );
-        cvGetRow( missing, &mask, i );
-        double r = dtree->predict( &sample, &mask )->value;
-        int d = fabs(r - responses->data.fl[i]) >= FLT_EPSILON;
-        if( d )
-        {
-            if( r != 'p' )
-                hr1++;
-            else
-                hr2++;
-        }
-        p_total += responses->data.fl[i] == 'p';
-    }
-
-    printf( "Results on the training database:\n"
-            "\tPoisonous mushrooms mis-predicted: %d (%g%%)\n"
-            "\tFalse-alarms: %d (%g%%)\n", hr1, (double)hr1*100/p_total,
-            hr2, (double)hr2*100/(data->rows - p_total) );
-
-    cvReleaseMat( &var_type );
-
-    return dtree;
-}
-
-
-static const char* var_desc[] =
-{
-    "cap shape (bell=b,conical=c,convex=x,flat=f)",
-    "cap surface (fibrous=f,grooves=g,scaly=y,smooth=s)",
-    "cap color (brown=n,buff=b,cinnamon=c,gray=g,green=r,\n\tpink=p,purple=u,red=e,white=w,yellow=y)",
-    "bruises? (bruises=t,no=f)",
-    "odor (almond=a,anise=l,creosote=c,fishy=y,foul=f,\n\tmusty=m,none=n,pungent=p,spicy=s)",
-    "gill attachment (attached=a,descending=d,free=f,notched=n)",
-    "gill spacing (close=c,crowded=w,distant=d)",
-    "gill size (broad=b,narrow=n)",
-    "gill color (black=k,brown=n,buff=b,chocolate=h,gray=g,\n\tgreen=r,orange=o,pink=p,purple=u,red=e,white=w,yellow=y)",
-    "stalk shape (enlarging=e,tapering=t)",
-    "stalk root (bulbous=b,club=c,cup=u,equal=e,rhizomorphs=z,rooted=r)",
-    "stalk surface above ring (ibrous=f,scaly=y,silky=k,smooth=s)",
-    "stalk surface below ring (ibrous=f,scaly=y,silky=k,smooth=s)",
-    "stalk color above ring (brown=n,buff=b,cinnamon=c,gray=g,orange=o,\n\tpink=p,red=e,white=w,yellow=y)",
-    "stalk color below ring (brown=n,buff=b,cinnamon=c,gray=g,orange=o,\n\tpink=p,red=e,white=w,yellow=y)",
-    "veil type (partial=p,universal=u)",
-    "veil color (brown=n,orange=o,white=w,yellow=y)",
-    "ring number (none=n,one=o,two=t)",
-    "ring type (cobwebby=c,evanescent=e,flaring=f,large=l,\n\tnone=n,pendant=p,sheathing=s,zone=z)",
-    "spore print color (black=k,brown=n,buff=b,chocolate=h,green=r,\n\torange=o,purple=u,white=w,yellow=y)",
-    "population (abundant=a,clustered=c,numerous=n,\n\tscattered=s,several=v,solitary=y)",
-    "habitat (grasses=g,leaves=l,meadows=m,paths=p\n\turban=u,waste=w,woods=d)",
-    0
-};
-
-
-static void print_variable_importance( CvDTree* dtree )
-{
-    const CvMat* var_importance = dtree->get_var_importance();
-    int i;
-    char input[1000];
-
-    if( !var_importance )
-    {
-        printf( "Error: Variable importance can not be retrieved\n" );
-        return;
-    }
-
-    printf( "Print variable importance information? (y/n) " );
-    int values_read = scanf( "%1s", input );
-    CV_Assert(values_read == 1);
-
-    if( input[0] != 'y' && input[0] != 'Y' )
-        return;
-
-    for( i = 0; i < var_importance->cols*var_importance->rows; i++ )
-    {
-        double val = var_importance->data.db[i];
-        char buf[100];
-        int len = (int)(strchr( var_desc[i], '(' ) - var_desc[i] - 1);
-        strncpy( buf, var_desc[i], len );
-        buf[len] = '\0';
-        printf( "%s", buf );
-        printf( ": %g%%\n", val*100. );
-    }
-}
-
-static void interactive_classification( CvDTree* dtree )
-{
-    char input[1000];
-    const CvDTreeNode* root;
-    CvDTreeTrainData* data;
-
-    if( !dtree )
-        return;
-
-    root = dtree->get_root();
-    data = dtree->get_data();
-
-    for(;;)
-    {
-        const CvDTreeNode* node;
-
-        printf( "Start/Proceed with interactive mushroom classification (y/n): " );
-        int values_read = scanf( "%1s", input );
-        CV_Assert(values_read == 1);
-
-        if( input[0] != 'y' && input[0] != 'Y' )
-            break;
-        printf( "Enter 1-letter answers, '?' for missing/unknown value...\n" );
-
-        // custom version of predict
-        node = root;
-        for(;;)
-        {
-            CvDTreeSplit* split = node->split;
-            int dir = 0;
-
-            if( !node->left || node->Tn <= dtree->get_pruned_tree_idx() || !node->split )
-                break;
-
-            for( ; split != 0; )
-            {
-                int vi = split->var_idx, j;
-                int count = data->cat_count->data.i[vi];
-                const int* map = data->cat_map->data.i + data->cat_ofs->data.i[vi];
-
-                printf( "%s: ", var_desc[vi] );
-                values_read = scanf( "%1s", input );
-                CV_Assert(values_read == 1);
-
-                if( input[0] == '?' )
-                {
-                    split = split->next;
-                    continue;
-                }
-
-                // convert the input character to the normalized value of the variable
-                for( j = 0; j < count; j++ )
-                    if( map[j] == input[0] )
-                        break;
-                if( j < count )
-                {
-                    dir = (split->subset[j>>5] & (1 << (j&31))) ? -1 : 1;
-                    if( split->inversed )
-                        dir = -dir;
-                    break;
-                }
-                else
-                    printf( "Error: unrecognized value\n" );
-            }
-
-            if( !dir )
-            {
-                printf( "Impossible to classify the sample\n");
-                node = 0;
-                break;
-            }
-            node = dir < 0 ? node->left : node->right;
-        }
-
-        if( node )
-            printf( "Prediction result: the mushroom is %s\n",
-                    node->class_idx == 0 ? "EDIBLE" : "POISONOUS" );
-        printf( "\n-----------------------------\n" );
-    }
-}
-
-
-int main( int argc, char** argv )
-{
-    CvMat *data = 0, *missing = 0, *responses = 0;
-    CvDTree* dtree;
-    const char* base_path = argc >= 2 ? argv[1] : "agaricus-lepiota.data";
-
-    help();
-
-    if( !mushroom_read_database( base_path, &data, &missing, &responses ) )
-    {
-        printf( "\nUnable to load the training database\n\n");
-        help();
-        return -1;
-    }
-
-    dtree = mushroom_create_dtree( data, missing, responses,
-        10 // poisonous mushrooms will have 10x higher weight in the decision tree
-        );
-    cvReleaseMat( &data );
-    cvReleaseMat( &missing );
-    cvReleaseMat( &responses );
-
-    print_variable_importance( dtree );
-    interactive_classification( dtree );
-    delete dtree;
-
-    return 0;
-}
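
The mushroom sample is removed rather than ported by this commit. Purely as an illustration of where the old CvDTree call would map, a hedged sketch with the new DTrees API; the toy numeric data stands in for the categorical mushroom attributes, and the CV_32F priors Mat and parameter values are assumptions rather than code from this commit:

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Toy 2-class data (illustrative only).
    float samples[12] = { 0,0,  0,1,  1,0,  5,5,  5,6,  6,5 };
    int labels[6] = { 0, 0, 0, 1, 1, 1 };
    Mat data(6, 2, CV_32F, samples), responses(6, 1, CV_32S, labels);

    // Priors play the role of the old p_weight: the larger the weight of the
    // second class, the more the tree is biased towards predicting it.
    float priors[] = { 1, 10 };

    // DTrees::Params(maxDepth, minSampleCount, regressionAccuracy, useSurrogates,
    //                maxCategories, CVFolds, use1SERule, truncatePrunedTree, priors);
    // the deleted sample also used 10-fold cross-validation pruning, which is
    // skipped here (CVFolds = 0) because of the tiny toy data set.
    DTrees::Params params(8, 2, 0, false, 15, 0, false, false, Mat(2, 1, CV_32F, priors));

    Ptr<DTrees> dtree = StatModel::train<DTrees>(
        TrainData::create(data, ROW_SAMPLE, responses), params);

    Mat query = (Mat_<float>(1, 2) << 5.5f, 5.5f);
    std::cout << "predicted class: " << dtree->predict(query) << std::endl;
    return 0;
}
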
diff --git a/samples/cpp/points_classifier.cpp b/samples/cpp/points_classifier.cpp
index ef00915..3aa4d9b 100644 (file)
@@ -102,8 +102,7 @@ static void predict_and_paint(const Ptr<StatModel>& model, Mat& dst)
 static void find_decision_boundary_NBC()
 {
     // learn classifier
-    Ptr<NormalBayesClassifier> normalBayesClassifier = NormalBayesClassifier::create();
-    normalBayesClassifier->train(prepare_train_data());
+    Ptr<NormalBayesClassifier> normalBayesClassifier = StatModel::train<NormalBayesClassifier>(prepare_train_data(), NormalBayesClassifier::Params());
 
     predict_and_paint(normalBayesClassifier, imgDst);
 }
@@ -113,10 +112,7 @@ static void find_decision_boundary_NBC()
 #if _KNN_
 static void find_decision_boundary_KNN( int K )
 {
-    Ptr<KNearest> knn = KNearest::create(true);
-    knn->setDefaultK(K);
-    knn->train(prepare_train_data());
-
+    Ptr<KNearest> knn = StatModel::train<KNearest>(prepare_train_data(), KNearest::Params(K, true));
     predict_and_paint(knn, imgDst);
 }
 #endif
@@ -124,9 +120,7 @@ static void find_decision_boundary_KNN( int K )
 #if _SVM_
 static void find_decision_boundary_SVM( SVM::Params params )
 {
-    Ptr<SVM> svm = SVM::create(params);
-    svm->train(prepare_train_data());
-
+    Ptr<SVM> svm = StatModel::train<SVM>(prepare_train_data(), params);
     predict_and_paint(svm, imgDst);
 
     Mat sv = svm->getSupportVectors();
@@ -149,8 +143,7 @@ static void find_decision_boundary_DT()
     params.use1SERule = false;
     params.truncatePrunedTree = false;
 
-    Ptr<DTrees> dtree = DTrees::create(params);
-    dtree->train(prepare_train_data());
+    Ptr<DTrees> dtree = StatModel::train<DTrees>(prepare_train_data(), params);
 
     predict_and_paint(dtree, imgDst);
 }
@@ -167,8 +160,7 @@ static void find_decision_boundary_BT()
                           Mat() // priors
                           );
 
-    Ptr<Boost> boost = Boost::create(params);
-    boost->train(prepare_train_data());
+    Ptr<Boost> boost = StatModel::train<Boost>(prepare_train_data(), params);
     predict_and_paint(boost, imgDst);
 }
 
@@ -185,8 +177,7 @@ static void find_decision_boundary_GBT()
                          false // use_surrogates )
                          );
 
-    Ptr<GBTrees> gbtrees = GBTrees::create(params);
-    gbtrees->train(prepare_train_data());
+    Ptr<GBTrees> gbtrees = StatModel::train<GBTrees>(prepare_train_data(), params);
     predict_and_paint(gbtrees, imgDst);
 }
 #endif
@@ -205,8 +196,7 @@ static void find_decision_boundary_RF()
                         TermCriteria(TermCriteria::MAX_ITER, 5, 0) // max_num_of_trees_in_the_forest,
                        );
 
-    Ptr<RTrees> rtrees = RTrees::create(params);
-    rtrees->train(prepare_train_data());
+    Ptr<RTrees> rtrees = StatModel::train<RTrees>(prepare_train_data(), params);
     predict_and_paint(rtrees, imgDst);
 }
 
@@ -215,9 +205,8 @@ static void find_decision_boundary_RF()
 #if _ANN_
 static void find_decision_boundary_ANN( const Mat&  layer_sizes )
 {
-    ANN_MLP::Params params(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
+    ANN_MLP::Params params(layer_sizes, ANN_MLP::SIGMOID_SYM, 1, 1, TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
                            ANN_MLP::Params::BACKPROP, 0.001);
-    Ptr<ANN_MLP> ann = ANN_MLP::create(layer_sizes, params, ANN_MLP::SIGMOID_SYM, 1, 1 );
 
     Mat trainClasses = Mat::zeros( trainedPoints.size(), classColors.size(), CV_32FC1 );
     for( int i = 0; i < trainClasses.rows; i++ )
@@ -228,7 +217,7 @@ static void find_decision_boundary_ANN( const Mat&  layer_sizes )
     Mat samples = prepare_train_samples(trainedPoints);
     Ptr<TrainData> tdata = TrainData::create(samples, ROW_SAMPLE, trainClasses);
 
-    ann->train(tdata);
+    Ptr<ANN_MLP> ann = StatModel::train<ANN_MLP>(tdata, params);
     predict_and_paint(ann, imgDst);
 }
 #endif
@@ -340,18 +329,15 @@ int main()
             img.copyTo( imgDst );
 #if _NBC_
             find_decision_boundary_NBC();
-            namedWindow( "NormalBayesClassifier", WINDOW_AUTOSIZE );
             imshow( "NormalBayesClassifier", imgDst );
 #endif
 #if _KNN_
             int K = 3;
             find_decision_boundary_KNN( K );
-            namedWindow( "kNN", WINDOW_AUTOSIZE );
             imshow( "kNN", imgDst );
 
             K = 15;
             find_decision_boundary_KNN( K );
-            namedWindow( "kNN2", WINDOW_AUTOSIZE );
             imshow( "kNN2", imgDst );
 #endif
 
@@ -369,36 +355,30 @@ int main()
             params.termCrit = TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 1000, 0.01);
 
             find_decision_boundary_SVM( params );
-            namedWindow( "classificationSVM1", WINDOW_AUTOSIZE );
             imshow( "classificationSVM1", imgDst );
 
             params.C = 10;
             find_decision_boundary_SVM( params );
-            namedWindow( "classificationSVM2", WINDOW_AUTOSIZE );
             imshow( "classificationSVM2", imgDst );
 #endif
 
 #if _DT_
             find_decision_boundary_DT();
-            namedWindow( "DT", WINDOW_AUTOSIZE );
             imshow( "DT", imgDst );
 #endif
 
 #if _BT_
             find_decision_boundary_BT();
-            namedWindow( "BT", WINDOW_AUTOSIZE );
             imshow( "BT", imgDst);
 #endif
 
 #if _GBT_
             find_decision_boundary_GBT();
-            namedWindow( "GBT", WINDOW_AUTOSIZE );
             imshow( "GBT", imgDst);
 #endif
 
 #if _RF_
             find_decision_boundary_RF();
-            namedWindow( "RF", WINDOW_AUTOSIZE );
             imshow( "RF", imgDst);
 #endif
 
@@ -408,13 +388,11 @@ int main()
             layer_sizes1.at<int>(1) = 5;
             layer_sizes1.at<int>(2) = classColors.size();
             find_decision_boundary_ANN( layer_sizes1 );
-            namedWindow( "ANN", WINDOW_AUTOSIZE );
             imshow( "ANN", imgDst );
 #endif
 
 #if _EM_
             find_decision_boundary_EM();
-            namedWindow( "EM", WINDOW_AUTOSIZE );
             imshow( "EM", imgDst );
 #endif
         }
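
As an aside, the ANN_MLP change above folds the layer sizes and the activation function into Params. A minimal sketch of that constructor order, assuming the signature visible in the added line; the XOR-style data is illustrative only:

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <cfloat>
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Toy XOR data with one-hot target rows, one column per class (illustrative).
    float samples[8] = { 0,0,  0,1,  1,0,  1,1 };
    float targets[8] = { 1,0,  0,1,  0,1,  1,0 };
    Mat data(4, 2, CV_32F, samples), responses(4, 2, CV_32F, targets);

    Mat layer_sizes = (Mat_<int>(1, 3) << 2, 5, 2);  // input, hidden, output

    // Params(layerSizes, activateFunc, fparam1, fparam2, termCrit, trainMethod, param1)
    ANN_MLP::Params params(layer_sizes, ANN_MLP::SIGMOID_SYM, 1, 1,
                           TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 300, FLT_EPSILON),
                           ANN_MLP::Params::BACKPROP, 0.001);

    Ptr<ANN_MLP> ann = StatModel::train<ANN_MLP>(
        TrainData::create(data, ROW_SAMPLE, responses), params);

    // predict() fills one output row per sample; the largest value wins.
    Mat out;
    ann->predict((Mat_<float>(1, 2) << 1.f, 0.f), out);
    std::cout << "network outputs: " << out << std::endl;
    return 0;
}
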
diff --git a/samples/cpp/train_HOG.cpp b/samples/cpp/train_HOG.cpp
index e3ee190..4076b63 100644 (file)
@@ -8,9 +8,10 @@
 #include <time.h>
 
 using namespace cv;
+using namespace cv::ml;
 using namespace std;
 
-void get_svm_detector(const SVM& svm, vector< float > & hog_detector );
+void get_svm_detector(const Ptr<SVM>& svm, vector< float > & hog_detector );
 void convert_to_ml(const std::vector< cv::Mat > & train_samples, cv::Mat& trainData );
 void load_images( const string & prefix, const string & filename, vector< Mat > & img_lst );
 void sample_neg( const vector< Mat > & full_neg_lst, vector< Mat > & neg_lst, const Size & size );
@@ -20,49 +21,24 @@ void train_svm( const vector< Mat > & gradient_lst, const vector< int > & labels
 void draw_locations( Mat & img, const vector< Rect > & locations, const Scalar & color );
 void test_it( const Size & size );
 
-void get_svm_detector(const SVM& svm, vector< float > & hog_detector )
+void get_svm_detector(const Ptr<SVM>& svm, vector< float > & hog_detector )
 {
-    // get the number of variables
-    const int var_all = svm.get_var_count();
-    // get the number of support vectors
-    const int sv_total = svm.get_support_vector_count();
-    // get the decision function
-    const CvSVMDecisionFunc* decision_func = svm.get_decision_function();
     // get the support vectors
-    const float** sv = new const float*[ sv_total ];
-    for( int i = 0 ; i < sv_total ; ++i )
-        sv[ i ] = svm.get_support_vector(i);
-
-    CV_Assert( var_all > 0 &&
-        sv_total > 0 &&
-        decision_func != 0 &&
-        decision_func->alpha != 0 &&
-        decision_func->sv_count == sv_total );
-
-    float svi = 0.f;
-
-    hog_detector.clear(); //clear stuff in vector.
-    hog_detector.reserve( var_all + 1 ); //reserve place for memory efficiency.
-
-     /**
-    * hog_detector^i = \sum_j support_vector_j^i * \alpha_j
-    * hog_detector^dim = -\rho
-    */
-   for( int i = 0 ; i < var_all ; ++i )
-    {
-        svi = 0.f;
-        for( int j = 0 ; j < sv_total ; ++j )
-        {
-            if( decision_func->sv_index != NULL ) // sometime the sv_index isn't store on YML/XML.
-                svi += (float)( sv[decision_func->sv_index[j]][i] * decision_func->alpha[ j ] );
-            else
-                svi += (float)( sv[j][i] * decision_func->alpha[ j ] );
-        }
-        hog_detector.push_back( svi );
-    }
-    hog_detector.push_back( (float)-decision_func->rho );
-
-    delete[] sv;
+    Mat sv = svm->getSupportVectors();
+    const int sv_total = sv.rows;
+    // get the decision function
+    Mat alpha, svidx;
+    double rho = svm->getDecisionFunction(0, alpha, svidx);
+
+    CV_Assert( alpha.total() == 1 && svidx.total() == 1 && sv_total == 1 );
+    CV_Assert( (alpha.type() == CV_64F && alpha.at<double>(0) == 1.) ||
+               (alpha.type() == CV_32F && alpha.at<float>(0) == 1.f) );
+    CV_Assert( sv.type() == CV_32F );
+    hog_detector.clear();
+
+    hog_detector.resize(sv.cols + 1);
+    memcpy(&hog_detector[0], sv.data, sv.cols*sizeof(hog_detector[0]));
+    hog_detector[sv.cols] = (float)-rho;
 }
 
 
@@ -263,7 +239,7 @@ Mat get_hogdescriptor_visu(const Mat& color_origImg, vector<float>& descriptorVa
             int mx = drawX + cellSize/2;
             int my = drawY + cellSize/2;
 
-            rectangle(visu, Point((int)(drawX*zoomFac), (int)(drawY*zoomFac)), Point((int)((drawX+cellSize)*zoomFac), (int)((drawY+cellSize)*zoomFac)), CV_RGB(100,100,100), 1);
+            rectangle(visu, Point((int)(drawX*zoomFac), (int)(drawY*zoomFac)), Point((int)((drawX+cellSize)*zoomFac), (int)((drawY+cellSize)*zoomFac)), Scalar(100,100,100), 1);
 
             // draw in each cell all 9 gradient strengths
             for (int bin=0; bin<gradientBinSize; bin++)
@@ -288,7 +264,7 @@ Mat get_hogdescriptor_visu(const Mat& color_origImg, vector<float>& descriptorVa
                 float y2 = my + dirVecY * currentGradStrength * maxVecLen * scale;
 
                 // draw gradient visualization
-                line(visu, Point((int)(x1*zoomFac),(int)(y1*zoomFac)), Point((int)(x2*zoomFac),(int)(y2*zoomFac)), CV_RGB(0,255,0), 1);
+                line(visu, Point((int)(x1*zoomFac),(int)(y1*zoomFac)), Point((int)(x2*zoomFac),(int)(y2*zoomFac)), Scalar(0,255,0), 1);
 
             } // for (all bins)
 
@@ -337,28 +313,26 @@ void compute_hog( const vector< Mat > & img_lst, vector< Mat > & gradient_lst, c
 
 void train_svm( const vector< Mat > & gradient_lst, const vector< int > & labels )
 {
-    SVM svm;
-
     /* Default values to train SVM */
-    SVMParams params;
+    SVM::Params params;
     params.coef0 = 0.0;
     params.degree = 3;
-    params.term_crit.epsilon = 1e-3;
+    params.termCrit.epsilon = 1e-3;
     params.gamma = 0;
-    params.kernel_type = SVM::LINEAR;
+    params.kernelType = SVM::LINEAR;
     params.nu = 0.5;
     params.p = 0.1; // for EPSILON_SVR, epsilon in loss function?
     params.C = 0.01; // From paper, soft classifier
-    params.svm_type = SVM::EPS_SVR; // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task
+    params.svmType = SVM::EPS_SVR; // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task
 
     Mat train_data;
     convert_to_ml( gradient_lst, train_data );
 
     clog << "Start training...";
-    svm.train( train_data, Mat( labels ), Mat(), Mat(), params );
+    Ptr<SVM> svm = StatModel::train<SVM>(train_data, ROW_SAMPLE, Mat(labels), params);
     clog << "...[done]" << endl;
 
-    svm.save( "my_people_detector.yml" );
+    svm->save( "my_people_detector.yml" );
 }
 
 void draw_locations( Mat & img, const vector< Rect > & locations, const Scalar & color )
@@ -380,7 +354,7 @@ void test_it( const Size & size )
     Scalar reference( 0, 255, 0 );
     Scalar trained( 0, 0, 255 );
     Mat img, draw;
-    SVM svm;
+    Ptr<SVM> svm;
     HOGDescriptor hog;
     HOGDescriptor my_hog;
     my_hog.winSize = size;
@@ -388,7 +362,7 @@ void test_it( const Size & size )
     vector< Rect > locations;
 
     // Load the trained SVM.
-    svm.load( "my_people_detector.yml" );
+    svm = StatModel::load<SVM>( "my_people_detector.yml" );
     // Set the trained svm to my_hog
     vector< float > hog_detector;
     get_svm_detector( svm, hog_detector );
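
A hedged standalone sketch of reading a linear SVM back out, as the rewritten get_svm_detector() does. Like the CV_Assert in the new code, it assumes the linear model is stored as a single compressed support vector; the toy data stands in for the HOG descriptors:

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>
#include <vector>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Toy linearly separable data (illustrative).
    float samples[12] = { 0,0,  0,1,  1,0,  5,5,  5,6,  6,5 };
    int labels[6] = { -1, -1, -1, 1, 1, 1 };
    Mat data(6, 2, CV_32F, samples), responses(6, 1, CV_32S, labels);

    SVM::Params params;
    params.svmType = SVM::C_SVC;
    params.kernelType = SVM::LINEAR;
    params.C = 0.01;
    Ptr<SVM> svm = StatModel::train<SVM>(data, ROW_SAMPLE, responses, params);

    // For a linear kernel the solver keeps a compressed support vector w, and
    // getDecisionFunction() returns rho so that f(x) = w.x - rho.
    Mat sv = svm->getSupportVectors();           // one row per (compressed) SV, CV_32F
    Mat alpha, svidx;
    double rho = svm->getDecisionFunction(0, alpha, svidx);

    // Append the bias term, HOG-detector style.
    std::vector<float> detector(sv.cols + 1);
    for( int i = 0; i < sv.cols; i++ )
        detector[i] = sv.at<float>(0, i);
    detector[sv.cols] = (float)-rho;

    std::cout << "w = " << sv.row(0) << ", rho = " << rho << std::endl;
    return 0;
}
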
diff --git a/samples/cpp/tree_engine.cpp b/samples/cpp/tree_engine.cpp
index 2c3046f..6defc31 100644 (file)
@@ -1,63 +1,35 @@
 #include "opencv2/ml/ml.hpp"
-#include "opencv2/core/core_c.h"
+#include "opencv2/core/core.hpp"
 #include "opencv2/core/utility.hpp"
 #include <stdio.h>
+#include <string>
 #include <map>
 
+using namespace cv;
+using namespace cv::ml;
+
 static void help()
 {
     printf(
-        "\nThis sample demonstrates how to use different decision trees and forests including boosting and random trees:\n"
-        "CvDTree dtree;\n"
-        "CvBoost boost;\n"
-        "CvRTrees rtrees;\n"
-        "CvERTrees ertrees;\n"
-        "CvGBTrees gbtrees;\n"
-        "Call:\n\t./tree_engine [-r <response_column>] [-c] <csv filename>\n"
+        "\nThis sample demonstrates how to use different decision trees and forests including boosting and random trees.\n"
+        "Usage:\n\t./tree_engine [-r <response_column>] [-ts type_spec] <csv filename>\n"
         "where -r <response_column> specified the 0-based index of the response (0 by default)\n"
-        "-c specifies that the response is categorical (it's ordered by default) and\n"
+        "-ts specifies the var type spec in the form ord[n1,n2-n3,n4-n5,...]cat[m1-m2,m3,m4-m5,...]\n"
         "<csv filename> is the name of training data file in comma-separated value format\n\n");
 }
 
-
-static int count_classes(CvMLData& data)
+static void train_and_print_errs(Ptr<StatModel> model, const Ptr<TrainData>& data)
 {
-    cv::Mat r = cv::cvarrToMat(data.get_responses());
-    std::map<int, int> rmap;
-    int i, n = (int)r.total();
-    for( i = 0; i < n; i++ )
+    bool ok = model->train(data);
+    if( !ok )
     {
-        float val = r.at<float>(i);
-        int ival = cvRound(val);
-        if( ival != val )
-            return -1;
-        rmap[ival] = 1;
+        printf("Training failed\n");
     }
-    return (int)rmap.size();
-}
-
-static void print_result(float train_err, float test_err, const CvMat* _var_imp)
-{
-    printf( "train error    %f\n", train_err );
-    printf( "test error    %f\n\n", test_err );
-
-    if (_var_imp)
+    else
     {
-        cv::Mat var_imp = cv::cvarrToMat(_var_imp), sorted_idx;
-        cv::sortIdx(var_imp, sorted_idx, CV_SORT_EVERY_ROW + CV_SORT_DESCENDING);
-
-        printf( "variable importance:\n" );
-        int i, n = (int)var_imp.total();
-        int type = var_imp.type();
-        CV_Assert(type == CV_32F || type == CV_64F);
-
-        for( i = 0; i < n; i++)
-        {
-            int k = sorted_idx.at<int>(i);
-            printf( "%d\t%f\n", k, type == CV_32F ? var_imp.at<float>(k) : var_imp.at<double>(k));
-        }
+        printf( "train error: %f\n", model->calcError(data, false, noArray()) );
+        printf( "test error: %f\n\n", model->calcError(data, true, noArray()) );
     }
-    printf("\n");
 }
 
 int main(int argc, char** argv)
@@ -69,14 +41,14 @@ int main(int argc, char** argv)
     }
     const char* filename = 0;
     int response_idx = 0;
-    bool categorical_response = false;
+    std::string typespec;
 
     for(int i = 1; i < argc; i++)
     {
         if(strcmp(argv[i], "-r") == 0)
             sscanf(argv[++i], "%d", &response_idx);
-        else if(strcmp(argv[i], "-c") == 0)
-            categorical_response = true;
+        else if(strcmp(argv[i], "-ts") == 0)
+            typespec = argv[++i];
         else if(argv[i][0] != '-' )
             filename = argv[i];
         else
@@ -88,52 +60,32 @@ int main(int argc, char** argv)
     }
 
     printf("\nReading in %s...\n\n",filename);
-    CvDTree dtree;
-    CvBoost boost;
-    CvRTrees rtrees;
-    CvERTrees ertrees;
-    CvGBTrees gbtrees;
-
-    CvMLData data;
+    const double train_test_split_ratio = 0.5;
 
+    Ptr<TrainData> data = TrainData::loadFromCSV(filename, 0, response_idx, response_idx+1, typespec);
 
-    CvTrainTestSplit spl( 0.5f );
-
-    if ( data.read_csv( filename ) == 0)
+    if( data.empty() )
     {
-        data.set_response_idx( response_idx );
-        if(categorical_response)
-            data.change_var_type( response_idx, CV_VAR_CATEGORICAL );
-        data.set_train_test_split( &spl );
-
-        printf("======DTREE=====\n");
-        dtree.train( &data, CvDTreeParams( 10, 2, 0, false, 16, 0, false, false, 0 ));
-        print_result( dtree.calc_error( &data, CV_TRAIN_ERROR), dtree.calc_error( &data, CV_TEST_ERROR ), dtree.get_var_importance() );
-
-        if( categorical_response && count_classes(data) == 2 )
-        {
-        printf("======BOOST=====\n");
-        boost.train( &data, CvBoostParams(CvBoost::DISCRETE, 100, 0.95, 2, false, 0));
-        print_result( boost.calc_error( &data, CV_TRAIN_ERROR ), boost.calc_error( &data, CV_TEST_ERROR ), 0 ); //doesn't compute importance
-        }
+        printf("ERROR: File %s can not be read\n", filename);
+        return 0;
+    }
 
-        printf("======RTREES=====\n");
-        rtrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
-        print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );
+    data->setTrainTestSplitRatio(train_test_split_ratio);
 
-        printf("======ERTREES=====\n");
-        ertrees.train( &data, CvRTParams( 18, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
-        print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );
+    printf("======DTREE=====\n");
+    Ptr<DTrees> dtree = DTrees::create(DTrees::Params( 10, 2, 0, false, 16, 0, false, false, Mat() ));
+    train_and_print_errs(dtree, data);
 
-        printf("======GBTREES=====\n");
-        if (categorical_response)
-            gbtrees.train( &data, CvGBTreesParams(CvGBTrees::DEVIANCE_LOSS, 100, 0.1f, 0.8f, 5, false));
-        else
-            gbtrees.train( &data, CvGBTreesParams(CvGBTrees::SQUARED_LOSS, 100, 0.1f, 0.8f, 5, false));
-        print_result( gbtrees.calc_error( &data, CV_TRAIN_ERROR), gbtrees.calc_error( &data, CV_TEST_ERROR ), 0 ); //doesn't compute importance
+    if( (int)data->getClassLabels().total() <= 2 ) // regression or 2-class classification problem
+    {
+        printf("======BOOST=====\n");
+        Ptr<Boost> boost = Boost::create(Boost::Params(Boost::GENTLE, 100, 0.95, 2, false, Mat()));
+        train_and_print_errs(boost, data);
     }
-    else
-        printf("File can not be read");
+
+    printf("======RTREES=====\n");
+    Ptr<RTrees> rtrees = RTrees::create(RTrees::Params(10, 2, 0, false, 16, Mat(), false, 0, TermCriteria(TermCriteria::MAX_ITER, 100, 0)));
+    train_and_print_errs(rtrees, data);
 
     return 0;
 }
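
For reference, a minimal sketch of the CSV-driven flow the rewritten tree_engine uses, assuming only the calls in the added lines (TrainData::loadFromCSV, setTrainTestSplitRatio, calcError). The file name is illustrative and must exist for the sketch to do anything useful:

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Illustrative file name; any CSV with the response in column 0 would do.
    Ptr<TrainData> data = TrainData::loadFromCSV("some_table.csv",
                                                 0,     // header lines to skip
                                                 0, 1,  // response occupies column [0, 1)
                                                 "");   // auto-detect variable types
                                                        // (a spec like the -ts option's
                                                        //  "ord[...]cat[...]" can go here)
    if( data.empty() )
    {
        std::cout << "could not read the CSV file" << std::endl;
        return 1;
    }

    data->setTrainTestSplitRatio(0.5);   // first half train, second half test

    Ptr<RTrees> rtrees = RTrees::create(
        RTrees::Params(10, 2, 0, false, 16, Mat(), false, 0,
                       TermCriteria(TermCriteria::MAX_ITER, 100, 0)));
    if( rtrees->train(data) )
    {
        // calcError(data, useTestSubset, ...): false = train error, true = test error.
        std::cout << "train error: " << rtrees->calcError(data, false, noArray()) << std::endl;
        std::cout << "test  error: " << rtrees->calcError(data, true, noArray()) << std::endl;
    }
    return 0;
}
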
diff --git a/samples/cpp/tutorial_code/ml/introduction_to_svm/introduction_to_svm.cpp b/samples/cpp/tutorial_code/ml/introduction_to_svm/introduction_to_svm.cpp
index 2b4a97d..f261418 100644 (file)
@@ -4,29 +4,29 @@
 #include <opencv2/ml/ml.hpp>
 
 using namespace cv;
+using namespace cv::ml;
 
-int main()
+int main(int, char**)
 {
     // Data for visual representation
     int width = 512, height = 512;
     Mat image = Mat::zeros(height, width, CV_8UC3);
 
     // Set up training data
-    float labels[4] = {1.0, -1.0, -1.0, -1.0};
-    Mat labelsMat(4, 1, CV_32FC1, labels);
+    int labels[4] = {1, -1, -1, -1};
+    Mat labelsMat(4, 1, CV_32SC1, labels);
 
     float trainingData[4][2] = { {501, 10}, {255, 10}, {501, 255}, {10, 501} };
     Mat trainingDataMat(4, 2, CV_32FC1, trainingData);
 
     // Set up SVM's parameters
-    CvSVMParams params;
-    params.svm_type    = CvSVM::C_SVC;
-    params.kernel_type = CvSVM::LINEAR;
-    params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);
+    SVM::Params params;
+    params.svmType    = SVM::C_SVC;
+    params.kernelType = SVM::LINEAR;
+    params.termCrit   = TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6);
 
     // Train the SVM
-    CvSVM SVM;
-    SVM.train(trainingDataMat, labelsMat, Mat(), Mat(), params);
+    Ptr<SVM> svm = StatModel::train<SVM>(trainingDataMat, ROW_SAMPLE, labelsMat, params);
 
     Vec3b green(0,255,0), blue (255,0,0);
     // Show the decision regions given by the SVM
@@ -34,30 +34,30 @@ int main()
         for (int j = 0; j < image.cols; ++j)
         {
             Mat sampleMat = (Mat_<float>(1,2) << j,i);
-            float response = SVM.predict(sampleMat);
+            float response = svm->predict(sampleMat);
 
             if (response == 1)
                 image.at<Vec3b>(i,j)  = green;
             else if (response == -1)
-                 image.at<Vec3b>(i,j)  = blue;
+                image.at<Vec3b>(i,j)  = blue;
         }
 
     // Show the training data
     int thickness = -1;
     int lineType = 8;
-    circle(    image, Point(501,  10), 5, Scalar(  0,   0,   0), thickness, lineType);
-    circle(    image, Point(255,  10), 5, Scalar(255, 255, 255), thickness, lineType);
-    circle(    image, Point(501, 255), 5, Scalar(255, 255, 255), thickness, lineType);
-    circle(    image, Point( 10, 501), 5, Scalar(255, 255, 255), thickness, lineType);
+    circle(    image, Point(501,  10), 5, Scalar(  0,   0,   0), thickness, lineType );
+    circle(    image, Point(255,  10), 5, Scalar(255, 255, 255), thickness, lineType );
+    circle(    image, Point(501, 255), 5, Scalar(255, 255, 255), thickness, lineType );
+    circle(    image, Point( 10, 501), 5, Scalar(255, 255, 255), thickness, lineType );
 
     // Show support vectors
     thickness = 2;
     lineType  = 8;
-    int c     = SVM.get_support_vector_count();
+    Mat sv = svm->getSupportVectors();
 
-    for (int i = 0; i < c; ++i)
+    for (int i = 0; i < sv.rows; ++i)
     {
-        const float* v = SVM.get_support_vector(i);
+        const float* v = sv.ptr<float>(i);
         circle(        image,  Point( (int) v[0], (int) v[1]),   6,  Scalar(128, 128, 128), thickness, lineType);
     }
 
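
Two details of the tutorial migration above are easy to miss: classification labels are now expected as CV_32SC1, and the support vectors come back as rows of a single Mat rather than through get_support_vector_count()/get_support_vector(). A condensed sketch using the same data as the tutorial (not part of the patch):

#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include <iostream>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Integer (CV_32SC1) labels mark this as a classification problem.
    int labels[4] = { 1, -1, -1, -1 };
    float trainingData[4][2] = { {501, 10}, {255, 10}, {501, 255}, {10, 501} };
    Mat labelsMat(4, 1, CV_32SC1, labels);
    Mat trainingDataMat(4, 2, CV_32FC1, trainingData);

    SVM::Params params;
    params.svmType = SVM::C_SVC;
    params.kernelType = SVM::LINEAR;
    params.termCrit = TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6);

    Ptr<SVM> svm = StatModel::train<SVM>(trainingDataMat, ROW_SAMPLE, labelsMat, params);

    // One row of getSupportVectors() per support vector.
    Mat sv = svm->getSupportVectors();
    for( int i = 0; i < sv.rows; i++ )
        std::cout << "support vector " << i << ": " << sv.row(i) << std::endl;
    return 0;
}
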
diff --git a/samples/cpp/tutorial_code/ml/non_linear_svms/non_linear_svms.cpp b/samples/cpp/tutorial_code/ml/non_linear_svms/non_linear_svms.cpp
index bfab746..3e7cdb3 100644 (file)
@@ -8,6 +8,7 @@
 #define FRAC_LINEAR_SEP                0.9f        // Fraction of samples which compose the linear separable part
 
 using namespace cv;
+using namespace cv::ml;
 using namespace std;
 
 static void help()
@@ -30,7 +31,7 @@ int main()
 
     //--------------------- 1. Set up training data randomly ---------------------------------------
     Mat trainData(2*NTRAINING_SAMPLES, 2, CV_32FC1);
-    Mat labels   (2*NTRAINING_SAMPLES, 1, CV_32FC1);
+    Mat labels   (2*NTRAINING_SAMPLES, 1, CV_32SC1);
 
     RNG rng(100); // Random value generation class
 
@@ -71,16 +72,15 @@ int main()
     labels.rowRange(NTRAINING_SAMPLES, 2*NTRAINING_SAMPLES).setTo(2);  // Class 2
 
     //------------------------ 2. Set up the support vector machines parameters --------------------
-    CvSVMParams params;
-    params.svm_type    = SVM::C_SVC;
+    SVM::Params params;
+    params.svmType    = SVM::C_SVC;
     params.C              = 0.1;
-    params.kernel_type = SVM::LINEAR;
-    params.term_crit   = TermCriteria(CV_TERMCRIT_ITER, (int)1e7, 1e-6);
+    params.kernelType = SVM::LINEAR;
+    params.termCrit   = TermCriteria(TermCriteria::MAX_ITER, (int)1e7, 1e-6);
 
     //------------------------ 3. Train the svm ----------------------------------------------------
     cout << "Starting training process" << endl;
-    CvSVM svm;
-    svm.train(trainData, labels, Mat(), Mat(), params);
+    Ptr<SVM> svm = StatModel::train<SVM>(trainData, ROW_SAMPLE, labels, params);
     cout << "Finished training process" << endl;
 
     //------------------------ 4. Show the decision regions ----------------------------------------
@@ -89,7 +89,7 @@ int main()
         for (int j = 0; j < I.cols; ++j)
         {
             Mat sampleMat = (Mat_<float>(1,2) << i, j);
-            float response = svm.predict(sampleMat);
+            float response = svm->predict(sampleMat);
 
             if      (response == 1)    I.at<Vec3b>(j, i)  = green;
             else if (response == 2)    I.at<Vec3b>(j, i)  = blue;
@@ -117,11 +117,11 @@ int main()
     //------------------------- 6. Show support vectors --------------------------------------------
     thick = 2;
     lineType  = 8;
-    int x     = svm.get_support_vector_count();
+    Mat sv = svm->getSupportVectors();
 
-    for (int i = 0; i < x; ++i)
+    for (int i = 0; i < sv.rows; ++i)
     {
-        const float* v = svm.get_support_vector(i);
+        const float* v = sv.ptr<float>(i);
         circle(        I,  Point( (int) v[0], (int) v[1]), 6, Scalar(128, 128, 128), thick, lineType);
     }